<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2025.1620971</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Plant Science</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>The PLSR-ML fusion strategy for high-accuracy leaf potassium inversion in karst region of Southwest China</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>Song</surname>
<given-names>Zhihao</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/3048776/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>He</surname>
<given-names>Wen</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Yao</surname>
<given-names>Yuefeng</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2806793/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Yu</surname>
<given-names>Ling</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Huang</surname>
<given-names>Jinjun</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Xu</surname>
<given-names>Yong</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1717531/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Wang</surname>
<given-names>Haoyu</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>College of Geomatics and Geoinformation, Guilin University of Technology</institution>, <addr-line>Guilin</addr-line>,&#xa0;<country>China</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Guangxi Key Laboratory of Plant Conservation and Restoration Ecology in Karst Terrain, Guangxi Institute of Botany, Guangxi Zhuang Autonomous Region and Chinese Academy of Sciences</institution>, <addr-line>Guilin</addr-line>,&#xa0;<country>China</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>School of Computer Science and Engineering, Guilin University of Aerospace Technology</institution>, <addr-line>Guilin</addr-line>,&#xa0;<country>China</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited by: Yu Fenghua, Shenyang Agricultural University, China</p>
</fn>
<fn fn-type="edited-by">
<p>Reviewed by: Renato Herrig Furlanetto, University of Florida, United States</p>
<p>Haijun Qi, Guangdong Academy of Agricultural Sciences (GDAAS), China</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Wen He, <email xlink:href="mailto:hw@gxib.cn">hw@gxib.cn</email>
</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>07</day>
<month>07</month>
<year>2025</year>
</pub-date>
<pub-date pub-type="collection">
<year>2025</year>
</pub-date>
<volume>16</volume>
<elocation-id>1620971</elocation-id>
<history>
<date date-type="received">
<day>30</day>
<month>04</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>17</day>
<month>06</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2025 Song, He, Yao, Yu, Huang, Xu and Wang</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Song, He, Yao, Yu, Huang, Xu and Wang</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>Potassium is a critical macronutrient for plant growth, yet accurately and rapidly estimating its content in karst regions remains challenging due to complex terrestrial conditions. To address this, we collected leaf potassium content and reflectance data from 301 plant samples across nine karst regions in Guangxi Province. Our results showed that hybrid models combining Partial Least Squares Regression (PLSR) with three machine learning algorithms&#x2014;Random Forest (RF), Extreme Gradient Boosting (XGBoost), and Multi-Layer Perceptron (MLP)&#x2014;namely PLSR-RF, PLSR-XGBoost, and PLSR-MLP, demonstrated exceptional accuracy in estimating leaf potassium content. Validation coefficient of determination (R&#xb2;) values reached 0.89, 0.94, and 0.96, respectively&#x2014;representing improvements of 206%, 147%, and 108% over standalone algorithms. This performance gain was attributed to rigorous overfitting control: PLSR&#x2019;s dimensionality reduction synergized with ensemble machine learning (RF, XGBoost, MLP) to eliminate redundant spectral features while retaining predictive signals. Furthermore, fractional differentiation preprocessing significantly improved the correlation between spectral reflectance and potassium content, enhancing model robustness. Two spectral regions (700&#x2013;1100 nm, 1400&#x2013;1800 nm) were identified as key predictors, aligning with known potassium-related biochemical absorption features. Collectively, the integration of these strategies offers a robust framework for nutrient monitoring in ecologically fragile karst ecosystems.</p>
</abstract>
<kwd-group>
<kwd>karst region</kwd>
<kwd>leaf potassium content</kwd>
<kwd>machine learning</kwd>
<kwd>fractional differentiation</kwd>
<kwd>spectral reflectance</kwd>
</kwd-group>
<counts>
<fig-count count="7"/>
<table-count count="1"/>
<equation-count count="8"/>
<ref-count count="71"/>
<page-count count="15"/>
<word-count count="7308"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-in-acceptance</meta-name>
<meta-value>Plant Nutrition</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>The karst landscapes of southwestern China constitute a globally significant geomorphological system (<xref ref-type="bibr" rid="B14">D&#x2019;Ettorre et&#xa0;al., 2024</xref>). Characterized by distinctive lithological structures and heterogeneous vegetation assemblages, this ecologically fragile region serves as a vital reservoir of terrestrial biodiversity. Nevertheless, intensive anthropogenic activities&#x2014;particularly shifting slash-and-burn agriculture and unsustainable slope farming practices&#x2014;have induced substantial degradation of surface vegetation cover (<xref ref-type="bibr" rid="B30">Jiang et&#xa0;al., 2014</xref>). This degradation may adversely affect the availability of essential nutrients such as potassium, which plays a critical role in plant growth by regulating water balance, facilitating nutrient transport (<xref ref-type="bibr" rid="B58">Wang et&#xa0;al., 2013</xref>; <xref ref-type="bibr" rid="B25">Hasanuzzaman et&#xa0;al., 2018</xref>), and enhancing plant resilience against biotic and abiotic stressors (<xref ref-type="bibr" rid="B3">Ansch&#xfc;tz et&#xa0;al., 2014</xref>). Foliar potassium concentration serves as a robust phytochemical indicator strongly correlated with plant physiological status, providing critical insights into vegetation health assessment. Consequently, precise quantification of foliar potassium levels emerges as a methodological imperative for elucidating plant adaptive strategies in degraded karst ecosystems.</p>
<p>Conventional laboratory methods for leaf potassium analysis, relying on destructive wet chemistry techniques, face inherent limitations in operational efficiency and scalability. In contrast, hyperspectral reflectance technology has emerged as a transformative non-destructive solution, enabling rapid <italic>in situ</italic> nutrient assessment through advanced spectroscopic platforms. The integration of hyperspectral remote sensing systems has particularly enhanced real-time potassium monitoring capabilities, demonstrating remarkable success in precision agriculture applications (<xref ref-type="bibr" rid="B36">Lin et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B5">Azadnia et&#xa0;al., 2023</xref>). However, species-specific variations result in differing spectral band sensitivities to potassium content across plant taxa. Current research has yet to fully resolve uncertainties in characterizing potassium-related spectral responses, necessitating further investigation into their underlying mechanisms. For example, <xref ref-type="bibr" rid="B39">Lu et&#xa0;al. (2020)</xref> found that the spectral reflectance of rice leaves in the shortwave infrared region (1300&#x2013;2000 nm) is particularly sensitive to potassium content. Similarly, <xref ref-type="bibr" rid="B40">Lyu et&#xa0;al. (2023)</xref> identified potassium-sensitive bands in grape leaves at 410 nm, 490&#x2013;500 nm, and 1242 nm. These discrepancies between rice and grape studies underscore the variability in potassium-sensitive spectral regions across species, highlighting both the challenges in universal band selection and the critical need for taxa-specific calibration. This variability becomes particularly relevant in ecologically unique regions such as karst landscapes, which host specialized plant communities. 
Karst-adapted species exhibit distinct spectral signatures compared to non-karst flora due to their divergent evolutionary adaptations and environmental stressors (<xref ref-type="bibr" rid="B66">Yue et&#xa0;al., 2010</xref>). Consequently, region-specific studies are imperative to map the spectral sensitivity patterns of leaf potassium in karst ecosystems, enabling accurate nutrient monitoring and supporting ecological conservation in these biodiverse yet fragile habitats.</p>
<p>While spectral information enables precise characterization of potassium signatures in leaf spectral response curves, noise interference remains a significant concern (<xref ref-type="bibr" rid="B59">Xie et&#xa0;al., 2020</xref>). Hyperspectral data acquisition is inherently susceptible to artifacts introduced by sample properties (e.g., particle size and surface texture) and environmental variability (<xref ref-type="bibr" rid="B32">Kong et&#xa0;al., 2023</xref>). Spectral differentiation transformations serve as a robust preprocessing technique to mitigate background noise and unwanted spectral reflectance variations. These transformations enhance spectral sensitivity, amplify diagnostic features, and optimize predictive model performance (<xref ref-type="bibr" rid="B61">Yang C, et al., 2021</xref>). First- and second-order derivatives are widely employed to improve spectral signal-to-noise ratios. However, integer-order differentiation exhibits limitations in resolving subtle spectral features when curvature variations are gradual, often leading to feature loss (<xref ref-type="bibr" rid="B34">Li et&#xa0;al., 2024</xref>). In contrast, fractional differentiation operates at finer computational intervals, enabling enhanced spectral information extraction from <italic>in situ</italic> leaf measurements (<xref ref-type="bibr" rid="B7">Benkhettou et&#xa0;al., 2015</xref>). In addition, using fractional differentiation can further sharpen peak shapes and perform better in detecting subtle signal changes in positive and negative spectral peaks (<xref ref-type="bibr" rid="B53">Tan et&#xa0;al., 2024</xref>). This methodological refinement directly translates to improved precision in estimating critical biochemical parameters, such as foliar potassium levels, which will be rigorously evaluated in our experimental framework.</p>
<p>In the field of nutrient content inversion, mainstream empirical approaches can be broadly categorized into four types (<xref ref-type="bibr" rid="B9">Berger et&#xa0;al., 2020</xref>): empirical parameter regression (<xref ref-type="bibr" rid="B29">Jay et&#xa0;al., 2017</xref>), linear nonparametric regression (<xref ref-type="bibr" rid="B22">Furlanetto et&#xa0;al., 2024</xref>), physically based methods, and nonlinear nonparametric regression (i.e., machine learning) (<xref ref-type="bibr" rid="B21">Furlanetto et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B20">Flynn et&#xa0;al., 2023</xref>). Empirical parameter regression typically employs narrowband vegetation indices (e.g., NIR/SWIR combinations) for rapid estimation of nutrients. However, due to the lack of distinct absorption features for certain elements and the influence of spectral signal coupling, these methods often suffer from limited generalizability (<xref ref-type="bibr" rid="B35">Li et&#xa0;al., 2021</xref>). Linear nonparametric regression techniques such as Partial Least Squares Regression (PLSR) and Principal Component Regression (PCR) utilize full-spectrum information and avoid manual feature selection. Still, their reliance on linear assumptions makes it difficult to capture the complex nonlinear relationships between spectral responses and plant biochemical properties (<xref ref-type="bibr" rid="B4">Atzberger et&#xa0;al., 2010</xref>). Physically based radiative transfer models (e.g., PROSAIL) aim to simulate the nutrient&#x2013;spectrum relationship from a mechanistic perspective. Nonetheless, the weak absorption features of nutrients can be easily confounded with canopy water content and structural parameters, leading to ill-posed inversion problems (<xref ref-type="bibr" rid="B19">F&#xe9;ret et&#xa0;al., 2019</xref>). 
In contrast, machine learning methods are well-suited for nutrient estimation due to their strong capabilities in modeling complex nonlinear relationships and handling large-scale datasets (<xref ref-type="bibr" rid="B26">He et&#xa0;al., 2021</xref>).</p>
<p>However, significant challenges in model fitting persist when applying machine learning algorithms to vegetation parameter estimation (<xref ref-type="bibr" rid="B15">Doktor et&#xa0;al., 2014</xref>). The performance of machine learning models critically depends on feature selection&#x2014;excessively large feature sets or overcomplicated architectures frequently lead to overfitting, compromising both training accuracy and model generalizability. Conversely, insufficient feature quantities and oversimplified models may result in underfitting. This issue is particularly pronounced in field spectroscopy data characterized by high dimensionality and multicollinearity. To address these challenges, researchers have implemented multiple mitigation strategies: (1) expanding training datasets to improve statistical representation; (2) employing dimensionality reduction techniques; (3) adopting robust cross-validation protocols; (4) applying regularization methods (<xref ref-type="bibr" rid="B67">Zhang et&#xa0;al., 2021</xref>); and (5) developing ensemble learning frameworks (<xref ref-type="bibr" rid="B55">Wang R, et&#xa0;al., 2020</xref>).</p>
<p>Hyperspectral data is characterized by high dimensionality and multivariate features, and the issue of feature redundancy has yet to be effectively resolved (<xref ref-type="bibr" rid="B37">Liu et&#xa0;al., 2021</xref>). This necessitates systematic dimensionality reduction of hyperspectral data to ensure model robustness. Notable implementations include <xref ref-type="bibr" rid="B12">Cao et&#xa0;al. (2021)</xref>, who successfully mitigated overfitting in maize leaf nitrogen estimation through optimized spectral compression, and <xref ref-type="bibr" rid="B42">Ni et&#xa0;al. (2024)</xref> achieving superior predictive performance (R&#xb2;=0.98) in sucrose quantification models via principal component analysis (PCA). This empirical evidence collectively substantiates that dimensionality reduction techniques, particularly PCA, significantly enhance both model accuracy (p&lt;0.01) and algorithmic stability compared to untreated hyperspectral inputs. Building upon these methodological advancements, our study innovatively integrates partial least squares (PLS)-optimized PCA with ensemble machine learning frameworks to establish a robust estimation model for leaf potassium content in karst ecosystems, specifically designed to improve generalizability across heterogeneous geological environments.</p>
<p>Based on field spectrometer data, this study used a fractional differential spectroscopy method combined with multiple models to estimate the potassium content in the leaves of mixed forests in the Guangxi karst region. The main objectives of this research are as follows: (1) To assess the distribution of wavelengths sensitive to potassium content in plant leaves in the karst region; (2) To explore the role of fractional differentiation in estimating potassium content in karst plant leaves based on spectroradiometer data; and (3) To investigate whether combined models can overcome the overfitting issues encountered in machine learning models when estimating potassium content in karst plant leaves.</p>
</sec>
<sec id="s2" sec-type="materials|methods">
<label>2</label>
<title>Materials and methods</title>
<sec id="s2_1">
<label>2.1</label>
<title>Study area</title>
<p>The investigation was conducted in the karst-dominated terrain of Guangxi Zhuang Autonomous Region, Southwest China (20&#xb0;54&#x2032;-26&#xb0;24&#x2032;N, 104&#xb0;28&#x2032;-112&#xb0;04&#x2032;E; <xref ref-type="fig" rid="f1">
<bold>Figure&#xa0;1</bold>
</xref>). This geomorphologically complex area exhibits altitudinal gradients ranging from coastal plains (0&#xa0;m) to montane systems (2141&#xa0;m ASL), bisected by the Tropic of Cancer and bounded by tropical marine systems to the south. These latitudinal and topographic configurations engender a monsoonal climate regime with pronounced seasonality, manifesting in mean annual temperatures of 17.5-23.5&#xb0;C and precipitation gradients from 841.2&#xa0;mm (leeward basins) to 3387.5&#xa0;mm (windward slopes). Nine standardized plots (200 m&#xb2; each) were established across karst terrains, covering three vegetation succession stages: primary forests, secondary forests, and shrublands. This stratified design effectively captures karst ecosystem heterogeneity.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Location of the nine sample plots.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1620971-g001.tif">
<alt-text content-type="machine-generated">Map of a region in southern China highlighting experimental plots in Jingxi, Longzhou, Pingguo, Du'an, Liujiang, Huanjiang, Lingui, Fuchuan, and Quanzhou. Green areas represent karst regions, while gray areas denote non-karst regions. An inset map shows the location within China. A scale and compass are included.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>Data collection</title>
<p>Longitudinal foliar sampling spanned July 2018 to September 2020 across all study plots. Within each plot, phyllosphere specimens were systematically collected from 8&#x2013;15 dominant species, establishing a comprehensive karst flora spectral database comprising 301 samples representing 37 families, 59 genera, and 70 species. To ensure spatial representativeness, sampling followed triaxial orientation protocols (0&#xb0;[N], 120&#xb0;, and 240&#xb0;) within the horizontal plane.</p>
<p>Spectral acquisition employed a high-resolution field spectroradiometer (Fieldspec4, ASD Inc., USA) with 3 nm VNIR (350&#x2013;1000 nm) and 8 nm SWIR (1001&#x2013;2500 nm) spectral resolution (<xref ref-type="bibr" rid="B50">Shah et&#xa0;al., 2019</xref>). Three photometric replicates per tree were obtained through standardized protocol: 1) periodic radiometric calibration (10-minute intervals) using integrated reference panels; 2) constrained by field operation limitations (4-hour battery endurance), two mature leaves per branch underwent non-destructive scanning; 3) branch-level spectral signatures were averaged to derive tree-specific reflectance profiles.</p>
<p>Post-spectral analysis, target leaves were immediately preserved in sterile bags (Whirl-Pak<sup>&#xae;</sup>) under controlled conditions (ICERSICE940 incubator, 4&#xb0;C). Samples underwent laboratory processing within 24&#xa0;h: 1) oven-drying at 75&#xb0;C to constant mass; 2) mechanical homogenization to 100-mesh particle size; 3) quantitative potassium determination via flame photometric analysis (Sherwood 410, &#xb1; 0.01 ppm detection limit) following standard digestion protocols (<xref ref-type="bibr" rid="B48">Reddy and Veeranki, 2013</xref>).</p>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>Methodology</title>
<sec id="s2_3_1">
<label>2.3.1</label>
<title>Fractional differentiation</title>
<p>The fractional differentiation extends the concept of traditional integer-order differentiation to any arbitrary order, enabling continuous interpolation between integer orders (<xref ref-type="bibr" rid="B27">Hong et&#xa0;al., 2019</xref>). This method emphasizes subtle changes in spectral information (<xref ref-type="bibr" rid="B57">Wang Z, et&#xa0;al., 2020</xref>). Currently, the classic definitions of fractional differentiation include Riemann-Liouville (R-L), Gr&#xfc;nwald-Letnikov (G-L), and Caputo (<xref ref-type="bibr" rid="B46">Pu et&#xa0;al., 2008</xref>; <xref ref-type="bibr" rid="B56">Wang et&#xa0;al., 2018</xref>). In this study, the Gr&#xfc;nwald-Letnikov (G-L) definition was mainly adopted to derive the differentiation to the n-th order, as shown in <xref ref-type="disp-formula" rid="eq1">Equation 1</xref>.</p>
<disp-formula id="eq1">
<label>(1)</label>
<mml:math display="block" id="M1">
<mml:mrow>
<mml:msup>
<mml:mi>d</mml:mi>
<mml:mi>v</mml:mi>
</mml:msup>
<mml:mi>f</mml:mi>
<mml:mo stretchy="false">(</mml:mo>
<mml:mi>x</mml:mi>
<mml:mo stretchy="false">)</mml:mo>
<mml:mo>=</mml:mo>
<mml:munder>
<mml:mrow>
<mml:mi>l</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>m</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>h</mml:mi>
<mml:mo>&#x2192;</mml:mo>
<mml:mn>0</mml:mn>
</mml:mrow>
</mml:munder>
<mml:mfrac>
<mml:mn>1</mml:mn>
<mml:mrow>
<mml:msup>
<mml:mi>h</mml:mi>
<mml:mi>v</mml:mi>
</mml:msup>
</mml:mrow>
</mml:mfrac>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>m</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>0</mml:mn>
</mml:mrow>
<mml:mrow>
<mml:mfrac>
<mml:mrow>
<mml:mi>t</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>a</mml:mi>
</mml:mrow>
<mml:mi>h</mml:mi>
</mml:mfrac>
</mml:mrow>
</mml:msubsup>
<mml:msup>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mi>m</mml:mi>
</mml:msup>
<mml:mfrac>
<mml:mrow>
<mml:mi>&#x393;</mml:mi>
<mml:mo stretchy="false">(</mml:mo>
<mml:mi>v</mml:mi>
<mml:mo>+</mml:mo>
<mml:mn>1</mml:mn>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mrow>
<mml:mi>m</mml:mi>
<mml:mo>!</mml:mo>
<mml:mi>&#x393;</mml:mi>
<mml:mo stretchy="false">(</mml:mo>
<mml:mi>v</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>m</mml:mi>
<mml:mo>+</mml:mo>
<mml:mn>1</mml:mn>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:mfrac>
<mml:mi>f</mml:mi>
<mml:mo stretchy="false">(</mml:mo>
<mml:mi>x</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>m</mml:mi>
<mml:mi>h</mml:mi>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where v is the order of differentiation, h is the step size, t and a are the upper and lower bounds of differentiation, respectively, and &#x393;(&#x22c5;) is the Gamma function, as defined in <xref ref-type="disp-formula" rid="eq2">Equation 2</xref>.</p>
<disp-formula id="eq2">
<label>(2)</label>
<mml:math display="block" id="M2">
<mml:mrow>
<mml:mtext>&#x393;</mml:mtext>
<mml:mo stretchy="false">(</mml:mo>
<mml:mi>&#x3b2;</mml:mi>
<mml:mo stretchy="false">)</mml:mo>
<mml:mo>=</mml:mo>
<mml:msubsup>
<mml:mo>&#x222b;</mml:mo>
<mml:mn>0</mml:mn>
<mml:mi>&#x221e;</mml:mi>
</mml:msubsup>
<mml:msup>
<mml:mi>e</mml:mi>
<mml:mrow>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>t</mml:mi>
</mml:mrow>
</mml:msup>
<mml:msup>
<mml:mi>t</mml:mi>
<mml:mrow>
<mml:mi>&#x3b2;</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msup>
<mml:mi>d</mml:mi>
<mml:mi>t</mml:mi>
<mml:mo>=</mml:mo>
<mml:mo stretchy="false">(</mml:mo>
<mml:mi>&#x3b2;</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
<mml:mo stretchy="false">)</mml:mo>
<mml:mo>!</mml:mo>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where <italic>&#x3b2;</italic> is an arbitrary variable. In this study, the leaf spectra were differentiated within the range of 0 to 3 orders (at intervals of 0.1 order).</p>
</sec>
<sec id="s2_3_2">
<label>2.3.2</label>
<title>Partial least squares regression</title>
<p>Partial Least Squares Regression (PLSR) is a multivariate data analysis technique that combines the features of Principal Component Analysis (PCA) and Multiple Linear Regression (MLR). It is used to predict a set of dependent variables from a large number of collinear independent variables. This method assumes that the datasets of independent and dependent variables are <inline-formula>
<mml:math display="inline" id="im1">
<mml:mrow>
<mml:mi>Z</mml:mi>
<mml:mo>=</mml:mo>
<mml:msub>
<mml:mrow>
<mml:mo stretchy="false">[</mml:mo>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:mo>,</mml:mo>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mn>2</mml:mn>
</mml:msub>
<mml:mo>,</mml:mo>
<mml:mo>&#x22ef;</mml:mo>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
<mml:mo stretchy="false">]</mml:mo>
</mml:mrow>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#xd7;</mml:mo>
<mml:mi>k</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> and <inline-formula>
<mml:math display="inline" id="im2">
<mml:mrow>
<mml:mi>Q</mml:mi>
<mml:mo>=</mml:mo>
<mml:msub>
<mml:mrow>
<mml:mo stretchy="false">[</mml:mo>
<mml:mi>q</mml:mi>
<mml:mo stretchy="false">]</mml:mo>
</mml:mrow>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#xd7;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>, respectively. First, the first latent variable <italic>f<sub>1</sub>
</italic>&#x200b; is extracted from Z, which is a linear combination of <inline-formula>
<mml:math display="inline" id="im3">
<mml:mrow>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:mo>,</mml:mo>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mn>2</mml:mn>
</mml:msub>
<mml:mo>,</mml:mo>
<mml:mo>&#x22ef;</mml:mo>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>, and maximizes the correlation with the dependent variable <italic>Q</italic>. Then, a regression model is established for <italic>Q</italic> using <italic>f<sub>1</sub>
</italic>&#x200b;&#x200b;. If the regression equation achieves the desired accuracy, component extraction is stopped; otherwise, the next component is extracted until the model reaches a satisfactory level of accuracy. The regression model is given by <xref ref-type="disp-formula" rid="eq3">Equation 3</xref>, and each latent variable is defined as shown in <xref ref-type="disp-formula" rid="eq4">Equation 4</xref>:</p>
<disp-formula id="eq3">
<label>(3)</label>
<mml:math display="block" id="M3">
<mml:mrow>
<mml:mi>q</mml:mi>
<mml:mo>=</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:msub>
<mml:mi>a</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mn>2</mml:mn>
</mml:msub>
<mml:msub>
<mml:mi>a</mml:mi>
<mml:mn>2</mml:mn>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:mo>&#x22ef;</mml:mo>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
<mml:msub>
<mml:mi>a</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula id="eq4">
<label>(4)</label>
<mml:math display="block" id="M4">
<mml:mrow>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>m</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:msub>
<mml:mi>w</mml:mi>
<mml:mrow>
<mml:mi>m</mml:mi>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>w</mml:mi>
<mml:mrow>
<mml:mi>m</mml:mi>
<mml:mn>2</mml:mn>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mn>2</mml:mn>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:mo>&#x22ef;</mml:mo>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>w</mml:mi>
<mml:mrow>
<mml:mi>m</mml:mi>
<mml:mi>k</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where <italic>m</italic> is the number of principal components, <italic>k</italic> is the number of independent variables, <italic>a</italic> is the regression coefficient of <italic>q</italic> with respect to <italic>f</italic>, and <italic>w</italic> is the linear coefficient of <italic>f</italic> with respect to <italic>z</italic>.</p>
</sec>
<sec id="s2_3_3">
<label>2.3.3</label>
<title>Random forest</title>
<p>Random Forest (RF) is a machine learning algorithm based on decision trees (<xref ref-type="bibr" rid="B11">Breiman, 2001</xref>). RF resamples multiple samples from the training dataset and constructs a decision tree for each sample. Finally, the output value is calculated as the average of the predictions from all decision trees (<xref ref-type="bibr" rid="B61">Yang T, et&#xa0;al., 2021</xref>). RF has two important parameters: the number of trees and the number of features considered for splitting at each node. Initially, the number of decision trees was set to 50, and was then gradually increased in steps of 50 until it reached 200. The feature parameters for each node split were set as sqrt, log2, and 10. The optimal parameters were determined through grid search (<xref ref-type="bibr" rid="B62">Yang and Shami, 2020a</xref>).</p>
</sec>
<sec id="s2_3_4">
<label>2.3.4</label>
<title>Extreme gradient boosting</title>
<p>XGBoost is an improved algorithm based on Gradient Boosted Decision Trees (GBDT), proposed by <xref ref-type="bibr" rid="B13">Chen and Guestrin (2016)</xref>, which efficiently constructs boosted trees and supports parallel computation. Compared with traditional GBDT, which only utilizes first-order differential information, XGBoost performs a second-order Taylor expansion on the loss function, thereby improving the efficiency of finding the optimal solution. Finally, XGBoost obtains the overall prediction by summing the predictions of multiple decision trees, as shown in <xref ref-type="disp-formula" rid="eq5">Equation 5</xref>.</p>
<disp-formula id="eq5">
<label>(5)</label>
<mml:math display="block" id="M5">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>y</mml:mi>
<mml:mo>^</mml:mo>
</mml:mover>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>m</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>M</mml:mi>
</mml:msubsup>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>m</mml:mi>
</mml:msub>
<mml:mo stretchy="false">(</mml:mo>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo stretchy="false">)</mml:mo>
<mml:mo>,</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>m</mml:mi>
</mml:msub>
<mml:mo>&#x2208;</mml:mo>
<mml:mi>F</mml:mi>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where <inline-formula>
<mml:math display="inline" id="im4">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>y</mml:mi>
<mml:mo>^</mml:mo>
</mml:mover>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> represents the final predicted value of the model, <italic>M</italic> denotes the number of combined decision trees, which is the number of trees to be tuned, <inline-formula>
<mml:math display="inline" id="im5">
<mml:mrow>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>m</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> is the <italic>m</italic>-th tree, <inline-formula>
<mml:math display="inline" id="im6">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> represents the <italic>n</italic>-th input sample, and <italic>F</italic> is the set of all tree models.</p>
</sec>
<sec id="s2_3_5">
<label>2.3.5</label>
<title>Multilayer perceptron</title>
<p>A Multilayer Perceptron (MLP) is a feedforward neural network composed of multiple neurons or nodes, which learns complex nonlinear mappings through connections between input and output vectors. It utilizes a parallel hierarchical structure consisting of an input layer, hidden layers, and an output layer, with information being transmitted through connection weights among these layers to predict target variables (<xref ref-type="bibr" rid="B18">Ehteram et&#xa0;al., 2020</xref>). In an MLP, the sum of the input signals received by a node is transformed through a nonlinear activation function to generate the output signal (<xref ref-type="bibr" rid="B23">Gardner and Dorling, 1998</xref>).</p>
<disp-formula id="eq6">
<label>(6)</label>
<mml:math display="block" id="M6">
<mml:mrow>
<mml:msub>
<mml:mi>s</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mstyle displaystyle="true">
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mi>n</mml:mi>
<mml:mn>0</mml:mn>
</mml:msub>
</mml:mrow>
</mml:msubsup>
<mml:mrow>
<mml:msub>
<mml:mi>w</mml:mi>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mi>j</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mstyle>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>a</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula id="eq7">
<label>(7)</label>
<mml:math display="block" id="M7">
<mml:mrow>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mi>f</mml:mi>
<mml:mo stretchy="false">(</mml:mo>
<mml:msub>
<mml:mi>s</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
<mml:mo stretchy="false">)</mml:mo>
<mml:mo>=</mml:mo>
<mml:msup>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mn>1</mml:mn>
<mml:mo>+</mml:mo>
<mml:msup>
<mml:mi>e</mml:mi>
<mml:mrow>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>s</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
</mml:mrow>
</mml:msup>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mrow>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msup>
</mml:mrow>
</mml:math>
</disp-formula>
<p>In <xref ref-type="disp-formula" rid="eq6">Equations 6</xref>, <xref ref-type="disp-formula" rid="eq7">7</xref>, <inline-formula>
<mml:math display="inline" id="im7">
<mml:mrow>
<mml:msub>
<mml:mi>s</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>&#x200b; represents the input to the <italic>j</italic>-th neuron in the hidden layer, &#x200b; <inline-formula>
<mml:math display="inline" id="im8">
<mml:mrow>
<mml:msub>
<mml:mi>a</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> is the bias for the <italic>j</italic>-th neuron in the hidden layer, <inline-formula>
<mml:math display="inline" id="im9">
<mml:mrow>
<mml:msub>
<mml:mi>w</mml:mi>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mi>j</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> is the weight between the <italic>i</italic>-th input neuron and the <italic>j</italic>-th neuron in the hidden layer, <inline-formula>
<mml:math display="inline" id="im10">
<mml:mrow>
<mml:mi>f</mml:mi>
<mml:mo stretchy="false">(</mml:mo>
<mml:msub>
<mml:mi>s</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:math>
</inline-formula> is the activation function, and <inline-formula>
<mml:math display="inline" id="im11">
<mml:mrow>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> is the output of the <italic>j</italic>-th neuron. The final output of the MLP is obtained by computing a weighted sum of the hidden layer outputs, as shown in <xref ref-type="disp-formula" rid="eq8">Equation 8</xref>:</p>
<disp-formula id="eq8">
<label>(8)</label>
<mml:math display="block" id="M8">
<mml:mrow>
<mml:msub>
<mml:mi>O</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mstyle displaystyle="true">
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>j</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mi>n</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
</mml:mrow>
</mml:msubsup>
<mml:mrow>
<mml:msub>
<mml:mi>w</mml:mi>
<mml:mrow>
<mml:mi>j</mml:mi>
<mml:mi>k</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>z</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mstyle>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>a</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where <inline-formula>
<mml:math display="inline" id="im12">
<mml:mrow>
<mml:msub>
<mml:mi>O</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>&#x200b; is the output of the <italic>k</italic>-th neuron in the output layer, <inline-formula>
<mml:math display="inline" id="im13">
<mml:mrow>
<mml:msub>
<mml:mi>w</mml:mi>
<mml:mrow>
<mml:mi>j</mml:mi>
<mml:mi>k</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>&#x200b; is the weight between the <italic>j</italic>-th neuron in the hidden layer and the <italic>k</italic>-th neuron in the output layer, and <italic>n<sub>1&#x200b;</sub>
</italic> represents the number of neurons in the hidden layer.</p>
</sec>
<sec id="s2_3_6">
<label>2.3.6</label>
<title>Combined models, sample segmentation, and accuracy assessment</title>
<p>The partial least squares regression (PLSR)-derived latent variables served as input variables for three machine learning architectures: RF, XGBoost, and MLP. Subsequently, the integrated models PLSR-RF, PLSR-XGBoost, and PLSR-MLP were established. This hybrid dimensionality reduction approach effectively mitigated high-dimensionality challenges inherent in spectral data while controlling algorithmic complexity. During latent variable extraction from fractionally differentiated spectra, we implemented a variance retention threshold, where the process was terminated once the cumulative explained variance reached 75%, to preserve critical spectral features.</p>
<p>To effectively split the data into training and validation sets, the train_test_split function from the scikit-learn library in Python 3.10 was used. This function allows for random splitting of the dataset into different subsets, ensuring the independence of model training and validation. The training set accounted for 4/5 of the total samples, while the validation set accounted for 1/5. The model accuracy was evaluated using the coefficient of determination (R&#xb2;), mean squared error (MSE), and mean absolute error (MAE).</p>
</sec>
<sec id="s2_3_7">
<label>2.3.7</label>
<title>Model parameter optimization</title>
<p>To ensure optimal predictive performance, the key hyperparameters of each model were systematically optimized. For the Partial Least Squares Regression (PLSR) model, the optimal number of components (n_components) was determined through exhaustive manual search over a predefined range (1 to 20) with model performance evaluated via 10-fold cross-validation. For the three machine learning models integrated with PLSR-Random Forest (RF), Extreme Gradient Boosting (XGBoost), and Multi-Layer Perceptron (MLP)&#x2014;hyperparameter tuning was performed using grid search with 10-fold cross-validation (<xref ref-type="bibr" rid="B62">Yang and Shami, 2020a</xref>).</p>
<p>In the RF model, the primary parameters optimized included the number of trees (n_estimators, e.g., 100, 200, 300) and the maximum tree depth (max_depth, e.g., 5, 10, 15). For the XGBoost model, key parameters such as the learning rate (learning_rate, e.g., 0.01, 0.05, 0.1), maximum depth (max_depth), and the number of estimators (n_estimators) were adjusted. In the MLP model, optimization focused on the architecture of hidden layers (hidden_layer_sizes, e.g., (100), or (100, 50)), activation function (activation, e.g., ReLU), solver algorithm (solver, e.g., Adam), and the L2 regularization term (alpha).</p>
</sec>
</sec>
</sec>
<sec id="s3" sec-type="results">
<label>3</label>
<title>Results</title>
<sec id="s3_1">
<label>3.1</label>
<title>Descriptive statistics of the samples</title>
<p>A total of 301 leaf samples were collected and analyzed for their total potassium content (expressed in units of 10 g/kg). The results showed that the potassium content ranged from 0.06 to 5.87, with a mean value of 0.81 (<xref ref-type="fig" rid="f2">
<bold>Figure&#xa0;2</bold>
</xref>). The coefficient of variation was calculated to be 1.30, indicating a high degree of variability among the samples. This substantial variation provides a solid foundation for model development and accuracy evaluation in subsequent analysis.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>The leaf potassium content frequency distribution.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1620971-g002.tif">
<alt-text content-type="machine-generated">A histogram of ln(TK) values with a normal distribution curve overlay in red. The x-axis ranges from -3.5 to 2.5, and the y-axis shows frequency. Key statistics are shown: mean 0.81, minimum 0.06, maximum 5.87, standard deviation 1.05, coefficient of variation 1.3, and sample size 301. Bars peak around -1.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Fractional differentiation of reflectance and its correlation</title>
<p>
<xref ref-type="fig" rid="f3">
<bold>Figure&#xa0;3</bold>
</xref> illustrates the variations in spectral reflectance with different fractional differentiations. Compared to integer-order differentiations (0th, 1st, 2nd, and 3rd), fractional differentiation exhibits smaller amplitudes and smoother transitions. This gradual transformation maintains the detailed features of the spectral curves and prevents the abrupt fluctuations typically observed in integer-order differentiations. These results suggest that fractional differentiation demonstrates greater advantages in analyzing complex experimental designs.</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>Effect of fractional differentiation orders from FD (0.0) to FD (3.0) on vegetation spectral reflectance: average reflectance spectra for each order.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1620971-g003.tif">
<alt-text content-type="machine-generated">Six graphs display reflectance against wavelength from 500 to 2500 nanometers. Each graph represents different FD values from 0.0 to 3.0, with various colored lines indicating different fractional dimensions. Reflectance decreases as FD values increase, showing distinct spectral features in each plot.</alt-text>
</graphic>
</fig>
<p>
<xref ref-type="fig" rid="f4">
<bold>Figure&#xa0;4</bold>
</xref> illustrates the distribution of absolute correlation coefficients between fractional differentiation spectra and leaf potassium content across fractional differentiation orders ranging from FD (0.0) to FD (3.0), with wavelengths spanning from 400 to 2500 nm. Before fractional differentiation (FD (0.0)), the spectral bands between 400&#x2013;505 nm and 640&#x2013;680 nm show significant correlation with leaf potassium content, though the correlation coefficients are relatively low. As the order of fractional differential (FD) increases&#x2014;particularly between FD (1.5) and FD (3.0)&#x2014;the spectral information in the ranges of 700&#x2013;1100 nm and 1400&#x2013;1800 nm shows stronger correlations with leaf potassium content, with most correlation coefficients exceeding 0.2. The maximum absolute correlation coefficient generally increases from FD (0.0) to FD (2.2), reaching a peak value of 0.46, before declining at higher orders. These findings highlight that selecting an appropriate fractional differentiation order, such as FD (2.2), can effectively improve the correlation between spectral features and the target variable in practical applications.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Absolute value distribution of correlation coefficients between fractional differentiation spectra and leaf potassium content, and the maximum absolute value of correlation coefficients for different fractional differentiations.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1620971-g004.tif">
<alt-text content-type="machine-generated">Graph depicting the relationship between wavelength (in nanometers) and fractional differential (FD) values ranging from 0.0 to 3.0. A heat map shows the absolute value of the correlation coefficient, with colors varying from blue to red. Red stars indicate the maximum absolute value of the correlation coefficient across different wavelengths and FD values, peaking around 2200 nm and FD 2.5, with a scale from 0.0 to 0.5 on the right.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Performance evaluation of individual models</title>
<p>The performance of the Partial Least Squares Regression (PLSR) model under fractional differentiation is shown in <xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5a</bold>
</xref>. Across the FD range from 0.0 to 3.0, the R&#xb2; values for the training set consistently exceed those of the validation set by approximately 0.2 to 0.3, suggesting the presence of a certain level of overfitting in the PLSR model. The validation set achieves its highest R&#xb2; value of 0.51 when the fractional differentiation is set to 0.8. Although the model&#x2019;s fitting accuracy is relatively low, it demonstrates stable performance without significant overfitting.</p>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Determination Coefficients (R&#xb2;) of different single models (<bold>(a)</bold> PLSR, <bold>(b)</bold> RF, <bold>(c)</bold> XGBoost, <bold>(d)</bold> MLP) for leaf potassium content estimation using fractional differentiation spectra: comparison of training and validation sets across different fractional differentiation orders (FD (0.0) to FD (3.0)).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1620971-g005.tif">
<alt-text content-type="machine-generated">Four line graphs compare the determination coefficients (R&#xb2;) for different models: PLSR, XGBoost, RF, and MLP. Each graph shows results for training and validation sets across fractional differentials from 0 to 3. Training set lines (green) generally have higher R&#xb2; values compared to validation sets (red), with varying trends in each model.</alt-text>
</graphic>
</fig>
<p>As shown in <xref ref-type="fig" rid="f5">
<bold>Figures&#xa0;5b&#x2013;d</bold>
</xref>, the RF, XGBoost, and MLP models all exhibit a marked discrepancy in R&#xb2; values between the training and validation sets, reflecting a clear tendency toward overfitting. In comparison to RF and XGBoost, the MLP model demonstrates marginally superior validation performance, with a maximum R&#xb2; of 0.46, outperforming RF (0.29) and XGBoost (0.38).</p>
<p>In summary, although the PLSR model has limited fitting accuracy in predicting leaf potassium content, it demonstrates good stability. The training set R&#xb2; remains between 0.6 and 0.7, while the validation set R&#xb2; stays between 0.3 and 0.5. In contrast, the RF, XGBoost, and MLP models perform well on the training set but poorly on the validation set, indicating potential overfitting. Therefore, among these four individual models, the PLSR model is the most suitable for estimating leaf potassium content.</p>
</sec>
<sec id="s3_4">
<label>3.4</label>
<title>Performance evaluation and analysis of combined models</title>
<p>The PLSR-RF model (<xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6a</bold>
</xref>) demonstrates strong fitting and generalization capabilities, as evidenced by its stable performance across most FD settings. The training set achieves consistently high R<sup>2</sup> values around 0.9, while the validation set maintains moderately high R&#xb2; values ranging from approximately 0.75 to 0.89. Notably, within the FD range of 0.5 to 1.3, the validation performance improves sharply, with the R&#xb2; value increasing from 0.01 to 0.77. The model achieves optimal performance at a fractional differentiation of FD (2.7), where the training set R&#xb2; is 0.98, with MSE and MAE of 0.01 and 0.07, respectively. For the validation set, the R&#xb2; value is 0.89, with MSE and MAE of 0.21 and 0.29, respectively.</p>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>The relationship between different differential orders (FD) and the determination coefficient (R&#xb2;) for training and validation sets across three models: <bold>(a)</bold> PLSR-RF, <bold>(b)</bold> PLSR-XGBoost, and <bold>(c)</bold> PLSR-MLP.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1620971-g006.tif">
<alt-text content-type="machine-generated">Three line graphs compare the determination coefficient (R&#xb2;) of training and validation sets across different fractional differentials (FD). Graph (a) shows the PLSR-RF model, graph (b) the PLSR-XGBoost model, and graph (c) the PLSR-MLP model. Each graph indicates higher R&#xb2; values in training sets compared to validation sets, with noticeable increases around FD 1.0 in all graphs.</alt-text>
</graphic>
</fig>
<p>The PLSR-XGBoost model shows significant fluctuations across different FD settings, particularly for the training set. Despite these fluctuations, the difference in R&#xb2; values between the training and validation sets decreases significantly when the fractional differentiation exceeds 1.2 (<xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6b</bold>
</xref>). This indicates that the combined model effectively mitigates overfitting. When the fractional differentiation is set to FD (2.7), the model performance reaches its peak, with R&#xb2;, MSE, and MAE values of 0.99, 1.8*10<sup>-5</sup>, and 0.003 for the training set, and 0.94, 0.1, and 0.22 for the validation set, respectively. These findings indicate that PLSR combined with XGBoost provides more stable predictions under higher fractional differentiation levels.</p>
<p>The PLSR-MLP model performs poorly at low fractional differentiation values (FD &lt; 0.8), with validation R&#xb2; remaining below 0.4 between FD (0.2) and FD (0.6). Notably, at FD (0.3), the model exhibits signs of underfitting, as indicated by similarly low performance on both the training and validation sets. This suggests that the MLP has limited adaptability to raw data or data processed with low-order fractional differentiation (<xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6c</bold>
</xref>). However, as FD increases, the model&#x2019;s performance improves significantly. At FD (2.8), the R&#xb2; values for both the training and validation sets reach 0.99 and 0.96, respectively, with MSE and MAE values of 0.01 and 0.05 for the training set, and 0.07 and 0.16 for the validation set, indicating excellent model performance at this optimal order.</p>
<p>Overall, the three combined models exhibit distinct responses to fractional differentiation. PLSR-RF improves with increasing FD but shows signs of overfitting. PLSR-XGBoost generalizes well when FD &gt; 1.0, despite early instability. While PLSR-MLP achieves the highest accuracy in this study (<xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7</bold>
</xref>), PLSR-XGBoost involves fewer hyperparameter adjustments, demonstrates high computational efficiency, and facilitates easy deployment. Therefore, although PLSR-MLP is the optimal model in terms of predictive performance, PLSR-XGBoost may offer a more practical solution for real-world potassium prediction tasks, especially in scenarios with limited computational resources or where rapid deployment is required.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>Prediction accuracy of leaf potassium content for each model at the optimal fractional differentiation order, showing the performance of individual models and combined models with evaluation metrics such as R&#xb2;, MSE, and MAE for both training and validation sets.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1620971-g007.tif">
<alt-text content-type="machine-generated">Scatter plots compare predicted and measured values (10 grams per kilogram) for various models: PLSR, RF, XGBoost, MLP, PLSR-RF, PLSR-XGBoost, and PLSR-MLP. Each plot includes training (blue stars) and validation (red dots) data, showing R-squared, mean squared error, and mean absolute error for both. The plots represent different feature dimensions, indicating model performance in prediction accuracy.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s3_5">
<label>3.5</label>
<title>Model comparison and selection of the optimal model</title>
<p>In this study, seven models, namely PLSR, RF, XGBoost, MLP, PLSR-RF, PLSR-XGBoost, and PLSR-MLP, were applied to predict the plant leaf potassium content using spectral differentiation transformation techniques in the karst region of Guangxi Province. The optimal fractional differentiation prediction results for each model are shown in <xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7</bold>
</xref>. Based on the coefficient of determination (R&#xb2;) on the validation sets, the top three models are PLSR-MLP (R&#xb2;=0.96), PLSR-XGBoost (R&#xb2;=0.94), and PLSR-RF (R&#xb2;=0.89), respectively. In comparison, the RF model alone showed the worst performance, with an R&#xb2; of only 0.29 on the validation sets.</p>
<p>Among these seven models, the PLSR-RF, PLSR-XGBoost, and PLSR-MLP models all effectively predict potassium content in plant leaves in the southwestern karst region. Relative to individual models, the three combined models exhibit improvements of 206%, 147%, and 108% in R<sup>2</sup> on the validation set, respectively. These substantial gains suggest that the combined modeling approach effectively mitigates overfitting and enhances generalization capability.</p>
</sec>
<sec id="s3_6">
<label>3.6</label>
<title>Advantages of fractional differentiation</title>
<p>The fractional differentiation is determined to be the optimal spectral transformation approach for all seven models (<xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref>). The application of fractional differentiation significantly enhances the models&#x2019; performance in estimating leaf potassium content. For the PLSR model, the optimal fractional differentiation is FD (0.8), resulting in a validation R&#xb2; of 0.51, a marked improvement over the 0th order (R&#xb2; = 0.26), 1st order (R&#xb2; = 0.39), 2nd order (R&#xb2; = 0.33), and 3rd order (R&#xb2; = 0.35). The PLSR-RF model achieves its best performance at FD (2.7), with a validation R&#xb2; of 0.89, significantly outperforming the 0th order (R&#xb2; = 0.005), 1st order (R&#xb2; = 0.58), 2nd order (R&#xb2; = 0.82), and 3rd order (R&#xb2; = 0.86). The PLSR-XGBoost model performs optimally at FD (2.7), with a validation R&#xb2; of 0.94, significantly outperforming the 0th order (R&#xb2; = 0.08), 1st order (R&#xb2; = 0.58), 2nd order (R&#xb2; = 0.83), and 3rd order (R&#xb2; = 0.89). Finally, the PLSR-MLP model achieves its highest validation R&#xb2; of 0.96 at FD (2.8), outperforming all integer orders from 0.0 to 3.0.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Performance comparison of seven models at different fractional differentiation orders (0.0, 1.0, 2.0, 3.0) and the optimal fractional differentiation order, based on evaluation metrics including R&#xb2;, MSE, and MAE for both individual and combined models.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">Model</th>
<th valign="middle" align="left">Orders</th>
<th valign="middle" align="left">Training Sets R2</th>
<th valign="middle" align="left">Training Sets MSE</th>
<th valign="middle" align="left">Training Sets MAE</th>
<th valign="middle" align="left">Validation Sets R2</th>
<th valign="middle" align="left">Validation Sets MSE</th>
<th valign="middle" align="left">Validation Sets MAE</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" rowspan="5" align="left">PLSR</td>
<td valign="middle" align="left">FD (0.0)</td>
<td valign="middle" align="left">0.44</td>
<td valign="middle" align="left">0.49</td>
<td valign="middle" align="left">0.49</td>
<td valign="middle" align="left">0.26</td>
<td valign="middle" align="left">1.41</td>
<td valign="middle" align="left">0.77</td>
</tr>
<tr>
<td valign="middle" align="left">FD (1.0)</td>
<td valign="middle" align="left">0.60</td>
<td valign="middle" align="left">0.35</td>
<td valign="middle" align="left">0.40</td>
<td valign="middle" align="left">0.39</td>
<td valign="middle" align="left">1.17</td>
<td valign="middle" align="left">0.60</td>
</tr>
<tr>
<td valign="middle" align="left">FD (2.0)</td>
<td valign="middle" align="left">0.55</td>
<td valign="middle" align="left">0.39</td>
<td valign="middle" align="left">0.40</td>
<td valign="middle" align="left">0.33</td>
<td valign="middle" align="left">1.28</td>
<td valign="middle" align="left">0.60</td>
</tr>
<tr>
<td valign="middle" align="left">FD (3.0)</td>
<td valign="middle" align="left">0.65</td>
<td valign="middle" align="left">0.31</td>
<td valign="middle" align="left">0.35</td>
<td valign="middle" align="left">0.35</td>
<td valign="middle" align="left">1.25</td>
<td valign="middle" align="left">0.58</td>
</tr>
<tr>
<td valign="middle" align="left">
<bold>FD (0.8)</bold>
</td>
<td valign="middle" align="left">
<bold>0.76</bold>
</td>
<td valign="middle" align="left">
<bold>0.21</bold>
</td>
<td valign="middle" align="left">
<bold>0.33</bold>
</td>
<td valign="middle" align="left">
<bold>0.51</bold>
</td>
<td valign="middle" align="left">
<bold>0.94</bold>
</td>
<td valign="middle" align="left">
<bold>0.62</bold>
</td>
</tr>
<tr>
<td valign="middle" rowspan="5" align="left">RF</td>
<td valign="top" align="left">FD (0.0)</td>
<td valign="top" align="left">0.75</td>
<td valign="top" align="left">0.22</td>
<td valign="top" align="left">0.27</td>
<td valign="top" align="left">0.07</td>
<td valign="top" align="left">1.78</td>
<td valign="top" align="left">0.75</td>
</tr>
<tr>
<td valign="top" align="left">FD (1.0)</td>
<td valign="top" align="left">0.77</td>
<td valign="top" align="left">0.2</td>
<td valign="top" align="left">0.25</td>
<td valign="top" align="left">0.15</td>
<td valign="top" align="left">1.63</td>
<td valign="top" align="left">0.71</td>
</tr>
<tr>
<td valign="top" align="left">FD (2.0)</td>
<td valign="top" align="left">0.87</td>
<td valign="top" align="left">0.11</td>
<td valign="top" align="left">0.19</td>
<td valign="top" align="left">0.29</td>
<td valign="top" align="left">1.31</td>
<td valign="top" align="left">0.59</td>
</tr>
<tr>
<td valign="top" align="left">FD (3.0)</td>
<td valign="top" align="left">0.76</td>
<td valign="top" align="left">0.21</td>
<td valign="top" align="left">0.25</td>
<td valign="top" align="left">0.3</td>
<td valign="top" align="left">1.43</td>
<td valign="top" align="left">0.64</td>
</tr>
<tr>
<td valign="top" align="left">
<bold>FD (1.9)</bold>
</td>
<td valign="top" align="left">
<bold>0.87</bold>
</td>
<td valign="top" align="left">
<bold>0.11</bold>
</td>
<td valign="top" align="left">
<bold>0.19</bold>
</td>
<td valign="top" align="left">
<bold>0.29</bold>
</td>
<td valign="top" align="left">
<bold>1.34</bold>
</td>
<td valign="top" align="left">
<bold>0.58</bold>
</td>
</tr>
<tr>
<td valign="middle" rowspan="5" align="left">XGBoost</td>
<td valign="top" align="left">FD (0.0)</td>
<td valign="top" align="left">0.33</td>
<td valign="top" align="left">0.58</td>
<td valign="top" align="left">0.47</td>
<td valign="top" align="left">0.001</td>
<td valign="top" align="left">1.9</td>
<td valign="top" align="left">0.76</td>
</tr>
<tr>
<td valign="top" align="left">FD (1.0)</td>
<td valign="top" align="left">0.72</td>
<td valign="top" align="left">0.23</td>
<td valign="top" align="left">0.31</td>
<td valign="top" align="left">0.17</td>
<td valign="top" align="left">1.59</td>
<td valign="top" align="left">0.69</td>
</tr>
<tr>
<td valign="top" align="left">FD (2.0)</td>
<td valign="top" align="left">0.98</td>
<td valign="top" align="left">0.01</td>
<td valign="top" align="left">0.09</td>
<td valign="top" align="left">0.3</td>
<td valign="top" align="left">1.33</td>
<td valign="top" align="left">0.61</td>
</tr>
<tr>
<td valign="top" align="left">FD (3.0)</td>
<td valign="top" align="left">0.84</td>
<td valign="top" align="left">0.14</td>
<td valign="top" align="left">0.21</td>
<td valign="top" align="left">0.31</td>
<td valign="top" align="left">1.32</td>
<td valign="top" align="left">0.61</td>
</tr>
<tr>
<td valign="top" align="left">
<bold>FD (2.2)</bold>
</td>
<td valign="top" align="left">
<bold>0.99</bold>
</td>
<td valign="top" align="left">
<bold>8*10<sup>-8</sup>
</bold>
</td>
<td valign="top" align="left">
<bold>2.2*10<sup>-4</sup>
</bold>
</td>
<td valign="top" align="left">
<bold>0.38</bold>
</td>
<td valign="top" align="left">
<bold>1.19</bold>
</td>
<td valign="top" align="left">
<bold>0.61</bold>
</td>
</tr>
<tr>
<td valign="middle" rowspan="5" align="left">MLP</td>
<td valign="top" align="left">FD (0.0)</td>
<td valign="top" align="left">0.01</td>
<td valign="top" align="left">0.85</td>
<td valign="top" align="left">0.65</td>
<td valign="top" align="left">0.19</td>
<td valign="top" align="left">1.55</td>
<td valign="top" align="left">0.75</td>
</tr>
<tr>
<td valign="top" align="left">FD (1.0)</td>
<td valign="top" align="left">0.42</td>
<td valign="top" align="left">0.51</td>
<td valign="top" align="left">0.42</td>
<td valign="top" align="left">0.45</td>
<td valign="top" align="left">1.05</td>
<td valign="top" align="left">0.7</td>
</tr>
<tr>
<td valign="top" align="left">FD (2.0)</td>
<td valign="top" align="left">0.73</td>
<td valign="top" align="left">0.23</td>
<td valign="top" align="left">0.32</td>
<td valign="top" align="left">0.17</td>
<td valign="top" align="left">1.59</td>
<td valign="top" align="left">0.97</td>
</tr>
<tr>
<td valign="top" align="left">FD (3.0)</td>
<td valign="top" align="left">0.91</td>
<td valign="top" align="left">0.08</td>
<td valign="top" align="left">0.17</td>
<td valign="top" align="left">0.12</td>
<td valign="top" align="left">1.67</td>
<td valign="top" align="left">0.98</td>
</tr>
<tr>
<td valign="top" align="left">
<bold>FD (0.9)</bold>
</td>
<td valign="top" align="left">
<bold>0.73</bold>
</td>
<td valign="top" align="left">
<bold>0.23</bold>
</td>
<td valign="top" align="left">
<bold>0.37</bold>
</td>
<td valign="top" align="left">
<bold>0.46</bold>
</td>
<td valign="top" align="left">
<bold>1.03</bold>
</td>
<td valign="top" align="left">
<bold>0.7</bold>
</td>
</tr>
<tr>
<td valign="middle" rowspan="5" align="left">PLSR-RF</td>
<td valign="top" align="left">FD (0.0)</td>
<td valign="top" align="left">0.51</td>
<td valign="top" align="left">0.41</td>
<td valign="top" align="left">0.39</td>
<td valign="top" align="left">0.005</td>
<td valign="top" align="left">1.91</td>
<td valign="top" align="left">0.77</td>
</tr>
<tr>
<td valign="top" align="left">FD (1.0)</td>
<td valign="top" align="left">0.93</td>
<td valign="top" align="left">0.06</td>
<td valign="top" align="left">0.15</td>
<td valign="top" align="left">0.58</td>
<td valign="top" align="left">0.79</td>
<td valign="top" align="left">0.49</td>
</tr>
<tr>
<td valign="top" align="left">FD (2.0)</td>
<td valign="top" align="left">0.99</td>
<td valign="top" align="left">0.007</td>
<td valign="top" align="left">0.06</td>
<td valign="top" align="left">0.82</td>
<td valign="top" align="left">0.34</td>
<td valign="top" align="left">0.36</td>
</tr>
<tr>
<td valign="top" align="left">FD (3.0)</td>
<td valign="top" align="left">0.99</td>
<td valign="top" align="left">0.004</td>
<td valign="top" align="left">0.03</td>
<td valign="top" align="left">0.86</td>
<td valign="top" align="left">0.26</td>
<td valign="top" align="left">0.32</td>
</tr>
<tr>
<td valign="top" align="left">
<bold>FD (2.7)</bold>
</td>
<td valign="top" align="left">
<bold>0.98</bold>
</td>
<td valign="top" align="left">
<bold>0.01</bold>
</td>
<td valign="top" align="left">
<bold>0.07</bold>
</td>
<td valign="top" align="left">
<bold>0.89</bold>
</td>
<td valign="top" align="left">
<bold>0.21</bold>
</td>
<td valign="top" align="left">
<bold>0.29</bold>
</td>
</tr>
<tr>
<td valign="middle" rowspan="5" align="left">PLSR-XGBoost</td>
<td valign="top" align="left">FD (0.0)</td>
<td valign="top" align="left">0.52</td>
<td valign="top" align="left">0.41</td>
<td valign="top" align="left">0.38</td>
<td valign="top" align="left">0.05</td>
<td valign="top" align="left">2.0</td>
<td valign="top" align="left">0.77</td>
</tr>
<tr>
<td valign="top" align="left">FD (1.0)</td>
<td valign="top" align="left">0.99</td>
<td valign="top" align="left">1.4*10<sup>-6</sup>
</td>
<td valign="top" align="left">8.4*10<sup>-4</sup>
</td>
<td valign="top" align="left">0.58</td>
<td valign="top" align="left">0.81</td>
<td valign="top" align="left">0.51</td>
</tr>
<tr>
<td valign="top" align="left">FD (2.0)</td>
<td valign="top" align="left">0.99</td>
<td valign="top" align="left">3.1*10<sup>-5</sup>
</td>
<td valign="top" align="left">0.004</td>
<td valign="top" align="left">0.84</td>
<td valign="top" align="left">0.3</td>
<td valign="top" align="left">0.33</td>
</tr>
<tr>
<td valign="top" align="left">FD (3.0)</td>
<td valign="top" align="left">0.99</td>
<td valign="top" align="left">1.9*10<sup>-5</sup>
</td>
<td valign="top" align="left">0.003</td>
<td valign="top" align="left">0.93</td>
<td valign="top" align="left">0.13</td>
<td valign="top" align="left">0.24</td>
</tr>
<tr>
<td valign="top" align="left">
<bold>FD (2.7)</bold>
</td>
<td valign="top" align="left">
<bold>0.99</bold>
</td>
<td valign="top" align="left">
<bold>1.8*10<sup>-5</sup>
</bold>
</td>
<td valign="top" align="left">
<bold>0.003</bold>
</td>
<td valign="top" align="left">
<bold>0.94</bold>
</td>
<td valign="top" align="left">
<bold>0.1</bold>
</td>
<td valign="top" align="left">
<bold>0.22</bold>
</td>
</tr>
<tr>
<td valign="middle" rowspan="5" align="left">PLSR-MLP</td>
<td valign="top" align="left">FD (0.0)</td>
<td valign="top" align="left">0.38</td>
<td valign="top" align="left">0.55</td>
<td valign="top" align="left">0.45</td>
<td valign="top" align="left">0.07</td>
<td valign="top" align="left">1.77</td>
<td valign="top" align="left">0.71</td>
</tr>
<tr>
<td valign="top" align="left">FD (1.0)</td>
<td valign="top" align="left">0.86</td>
<td valign="top" align="left">0.12</td>
<td valign="top" align="left">0.21</td>
<td valign="top" align="left">0.79</td>
<td valign="top" align="left">0.39</td>
<td valign="top" align="left">0.35</td>
</tr>
<tr>
<td valign="top" align="left">FD (2.0)</td>
<td valign="top" align="left">0.99</td>
<td valign="top" align="left">0.007</td>
<td valign="top" align="left">0.06</td>
<td valign="top" align="left">0.91</td>
<td valign="top" align="left">0.06</td>
<td valign="top" align="left">0.16</td>
</tr>
<tr>
<td valign="top" align="left">FD (3.0)</td>
<td valign="top" align="left">0.99</td>
<td valign="top" align="left">0.001</td>
<td valign="top" align="left">0.02</td>
<td valign="top" align="left">0.91</td>
<td valign="top" align="left">0.04</td>
<td valign="top" align="left">0.19</td>
</tr>
<tr>
<td valign="top" align="left">
<bold>FD (2.8)</bold>
</td>
<td valign="top" align="left">
<bold>0.99</bold>
</td>
<td valign="top" align="left">
<bold>0.01</bold>
</td>
<td valign="top" align="left">
<bold>0.07</bold>
</td>
<td valign="top" align="left">
<bold>0.96</bold>
</td>
<td valign="top" align="left">
<bold>0.05</bold>
</td>
<td valign="top" align="left">
<bold>0.16</bold>
</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>This table presents the R<sup>2</sup>, MSE, and MAE for the training and validation sets under the optimal fractional order, with the best-performing results shown in bold.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<p>The results show that the optimal differentiation orders in all seven models are fractional rather than integer. This highlights the advantage of fractional differentiation in improving the accuracy and robustness of leaf potassium content estimation.</p>
</sec>
</sec>
<sec id="s4" sec-type="discussion">
<label>4</label>
<title>Discussion</title>
<sec id="s4_1">
<label>4.1</label>
<title>Distribution of sensitive wavelengths</title>
<p>This study demonstrates that the spectral ranges of 700&#x2013;1100 nm and 1400&#x2013;1800 nm are critical for accurately estimating potassium content in plant leaves. Previous studies have identified the 964&#x2013;1024 nm range as important for detecting potassium status in mature rubber tree leaves (<xref ref-type="bibr" rid="B28">Hu et&#xa0;al., 2024</xref>). In addition, specific wavelengths such as 720 nm and 1027 nm have been shown to play essential roles in predicting potassium content in rapeseed leaves (<xref ref-type="bibr" rid="B68">Zhang et&#xa0;al., 2013</xref>). The sensitive band in the 1400&#x2013;1800 nm range identified in this study also aligns closely with the findings of <xref ref-type="bibr" rid="B45">Pimstein et&#xa0;al. (2011)</xref>, further validating the relevance of this region for potassium estimation. Potassium is an essential ion in plant cells, involved in regulating osmotic pressure, activating enzymatic processes, and controlling stomatal dynamics (<xref ref-type="bibr" rid="B43">Nieves-Cordones et&#xa0;al., 2014</xref>; <xref ref-type="bibr" rid="B65">Yu et&#xa0;al., 2023</xref>). These physiological activities influence leaf cellular structure and water status, thereby indirectly affecting spectral reflectance. In the 700&#x2013;1100 nm range, particularly within the near-infrared region (700&#x2013;900 nm), spectral responses are strongly associated with internal leaf structure, which is sensitive to variations in tissue density and cellular arrangement. Since potassium plays a key role in water transport, cell turgor, and tissue development, changes in potassium levels can induce structural modifications that alter reflectance in this region (<xref ref-type="bibr" rid="B40">Lyu et&#xa0;al., 2023</xref>). 
Moreover, the short-wave near-infrared region (900&#x2013;1100 nm) captures spectral signals related to leaf water content and biochemical composition, both of which are closely linked to potassium-mediated regulation (<xref ref-type="bibr" rid="B16">Dos Santos et&#xa0;al., 2023</xref>).</p>
<p>The presence of sensitive bands in the 1400&#x2013;1800 nm range is closely linked to the various physiological roles of potassium in plant growth. Potassium influences leaf water transpiration by regulating stomatal opening, which in turn affects spectral reflectance (<xref ref-type="bibr" rid="B36">Lin et&#xa0;al., 2024</xref>). Consequently, potassium-sensitive bands are often found near the peak wavelengths of water absorption, such as 1450 nm and 1950 nm (<xref ref-type="bibr" rid="B65">Yu et&#xa0;al., 2023</xref>). However, some wavelengths farther from these water absorption peaks also show high sensitivity, likely due to changes in plant chemical composition and physiology under the unique environmental conditions of the karst regions. Previous studies have demonstrated significant differences in stoichiometric characteristics between plants in karst and non-karst regions (<xref ref-type="bibr" rid="B69">Zhang et&#xa0;al., 2019</xref>). Potassium is crucial for activating enzymes involved in starch, protein, and fat synthesis, as well as promoting the synthesis of plant hormones that regulate meristem growth (<xref ref-type="bibr" rid="B1">Amirruddin et&#xa0;al., 2020</xref>). These functions may contribute to the sensitive bands distant from water absorption peaks. Therefore, the presence of such bands in the 1400&#x2013;1800 nm range likely reflects potassium&#x2019;s regulatory effects on physiological traits linked to long-term adaptation of plants to the karst environment.</p>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>The capabilities of fractional differentiation</title>
<p>Spectral data are often affected by instrument noise, environmental conditions, sample surface scattering, and background signals (<xref ref-type="bibr" rid="B38">Liu et&#xa0;al., 2023</xref>). Preprocessing techniques help mitigate these interferences, yielding a purer spectral signal that prevents the model from being affected by irrelevant signals and reduces errors (<xref ref-type="bibr" rid="B33">Li et&#xa0;al., 2025</xref>). Among these techniques, differentiation&#x2014;particularly fractional differentiation&#x2014;has emerged as a powerful method for capturing subtle spectral details and improving the accuracy of spectral-based estimations.</p>
<p>While traditional preprocessing techniques such as SNV and MSC effectively reduce scattering effects and smooth spectra, they are limited in handling high-noise spectral data (<xref ref-type="bibr" rid="B44">Oliveri et&#xa0;al., 2019</xref>). Differentiation processing of near-infrared spectra effectively removes noise while extracting subtle inflection points and spectral changes (<xref ref-type="bibr" rid="B56">Wang et&#xa0;al., 2018</xref>). <xref ref-type="bibr" rid="B63">Yang et&#xa0;al. (2022)</xref> demonstrated that applying differentiation to crop spectra significantly improves model prediction accuracy. Similarly, <xref ref-type="bibr" rid="B51">Shen et&#xa0;al. (2020)</xref> found that fractional differentiation significantly improves the accuracy of soil organic matter (SOM) content estimation. These studies highlight the significant advantages of differentiation in spectral preprocessing. Our findings similarly show that differentiation enhances the correlation between leaf potassium content and spectral reflectance, thereby improving estimation accuracy.</p>
<p>Differentiation includes both integer-order and fractional differentiation (<xref ref-type="bibr" rid="B31">Jin and Wang, 2022</xref>). Integer-order differentiation typically involves the first and second differentiations. However, the large intervals between these first and second differentiations result in significant differences between the nth and (n+1)th differentiation curves. This limitation causes integer-order differentiation to overlook finer spectral details (<xref ref-type="bibr" rid="B2">Anon, 2020</xref>). In contrast, fractional differentiation can extract detailed spectral information over smaller intervals while minimizing the introduction of excessive high-frequency noise (<xref ref-type="bibr" rid="B71">Zununjan et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B52">Song et&#xa0;al., 2023</xref>). The advantages of fractional differentiation stem from its unique mathematical structure, which, through the Gr&#xfc;nwald-Letnikov definition, achieves a generalized difference structure, smooth attenuation, and long memory effects (<xref ref-type="bibr" rid="B49">Scherer et&#xa0;al., 2011</xref>). This enables fractional differentiation to more accurately capture spectral detail variations in data with complex background noise. <xref ref-type="bibr" rid="B24">Ge et&#xa0;al. (2022)</xref> demonstrated that fractional differentiation is highly effective for processing hyperspectral data in soil salinization risk assessment, with models using fractional differentiation proving more stable than those using integer-order differentiation. This conclusion from <xref ref-type="bibr" rid="B24">Ge et&#xa0;al. (2022)</xref> aligns with our findings, where fractional differentiation outperformed integer-order differentiation in estimating potassium content in plant leaves in the karst region.</p>
<p>However, the application of fractional differentiation also presents challenges. Low-order differentiation transformations provide limited improvement in correlation, while higher-order differentiation does not significantly enhance correlation coefficients between spectral reflectance and potassium content. Additionally, the optimal fractional differentiation varies across models, and similar studies on nutrient inversion in plant leaves suggest that the best fractional differentiation should be chosen based on the specific model being used.</p>
</sec>
<sec id="s4_3">
<label>4.3</label>
<title>Control overfitting</title>
<p>The results indicate that the RF, XGBoost, and MLP models generally exhibit overfitting (<xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5</bold>
</xref>). Due to their strong nonlinear fitting abilities (<xref ref-type="bibr" rid="B8">Bent&#xe9;jac et&#xa0;al., 2021</xref>), these models tend to capture noise and irrelevant features when handling high-dimensional data, resulting in overfitting (<xref ref-type="bibr" rid="B64">Ying, 2019</xref>).</p>
<p>Common methods to control overfitting include dimensionality reduction, regularization, cross-validation, feature selection (<xref ref-type="bibr" rid="B6">Barbosa et&#xa0;al., 2024</xref>), and ensemble models. Several studies have explored the application of these methods in controlling overfitting. For example, <xref ref-type="bibr" rid="B54">Teresa et&#xa0;al. (2022)</xref> showed that dimensionality reduction effectively addresses over-parameterization in deep learning. <xref ref-type="bibr" rid="B17">Du et&#xa0;al. (2024)</xref> estimated rapeseed growth parameters using an ensemble learning algorithm, achieving better performance than individual machine learning models. For dimensionality reduction, we employed a PLS-based PCA method to extract latent variables that are highly correlated with the target variable. These latent variables were used as input features for the RF, XGBoost, and MLP models, effectively reducing the risk of overfitting in complex datasets.</p>
<p>In addition, hyperparameter optimization is a crucial strategy for mitigating overfitting and improving model generalization (<xref ref-type="bibr" rid="B10">Bischl et&#xa0;al., 2023</xref>). By tuning parameters such as the number of estimators, learning rate, and maximum tree depth (for RF and XGBoost), or the number of hidden layers and neurons (for MLP), models can better balance bias and variance. In this study, we employed grid search combined with cross-validation to optimize the key hyperparameters of each model, thereby reducing overfitting and enhancing predictive robustness. These findings are consistent with previous studies, which have demonstrated that well-tuned models generally outperform those using default configurations, particularly in high-dimensional datasets (<xref ref-type="bibr" rid="B47">Quan, 2024</xref>).</p>
<p>Combining dimensionality reduction with machine learning shows great potential for predicting nutrient content in plant leaves. For instance, <xref ref-type="bibr" rid="B41">Mahajan et&#xa0;al. (2024)</xref> used a PLSR-based machine learning model to predict potassium content in cashew leaves, achieving an R&#xb2; of 0.66. <xref ref-type="bibr" rid="B70">Zhou et&#xa0;al. (2024)</xref> combined PCA with machine learning to predict cadmium content in lettuce leaves, obtaining an R&#xb2; of 0.92 for the validation set. In our study, potassium content estimation in karst plants achieved an R&#xb2; of 0.96 in the prediction set. This result confirms the effectiveness of PLS-based dimensionality reduction for retrieving leaf nutrient content across multiple species. This approach provides a valuable reference for future research.</p>
<p>In summary, combined machine learning models effectively control overfitting and enhance prediction performance. However, our research is limited to the leaf scale, and further validation is needed for their effectiveness in controlling overfitting when applied to UAV or satellite platforms. Future studies should explore the applicability of these models at larger scales and with higher-resolution data to comprehensively assess their generalization and practical value. Moreover, selecting the best model should not rely solely on prediction accuracy; factors such as model complexity, training time, and computational cost must also be taken into account to ensure the model&#x2019;s feasibility and efficiency in real-world applications.</p>
</sec>
</sec>
<sec id="s5" sec-type="conclusions">
<label>5</label>
<title>Conclusions</title>
<p>This study identifies key spectral bands (700&#x2013;1100 nm, and 1400&#x2013;1800 nm) that are critical for estimating potassium content in plant leaves. These bands correspond to important physiological processes, including photosynthesis, pigment concentration, and water regulation, which are influenced by potassium. Fractional differentiation effectively reduces noise and captures subtle spectral features, significantly improving the accuracy of potassium estimation compared to traditional integer-order differentiation.</p>
<p>Furthermore, the study addresses overfitting in machine learning models by combining dimensionality reduction with advanced algorithms such as Random Forest (RF), Extreme Gradient Boosting (XGBoost), and Multilayer Perceptron (MLP). This integrated approach resulted in a high prediction accuracy (R&#xb2; = 0.96) for potassium content in karst region plants.</p>
<p>In summary, this research advances potassium estimation through hyperspectral data by optimizing data preprocessing and enhancing model performance. These findings provide valuable insights for plant nutrient monitoring, particularly in complex ecological environments, and offer a foundation for future research on large-scale remote sensing applications.</p>
</sec>
</body>
<back>
<sec id="s6" sec-type="data-availability">
<title>Data availability statement</title>
<p>The datasets presented in this article are not readily available because the data used in this study are confidential. Requests to access the datasets should be directed to Wen He, <email xlink:href="mailto:hw@gxib.cn">hw@gxib.cn</email>.</p>
</sec>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>ZS: Methodology, Validation, Writing &#x2013; original draft. WH: Conceptualization, Investigation, Methodology, Validation, Writing &#x2013; review &amp; editing. YY: Supervision, Writing &#x2013; review &amp; editing. LY: Investigation, Writing &#x2013; review &amp; editing. JH: Data curation, Investigation, Writing &#x2013; review &amp; editing. YX: Writing &#x2013; review &amp; editing. HW: Investigation, Writing &#x2013; review &amp; editing.</p>
</sec>
<sec id="s8" sec-type="funding-information">
<title>Funding</title>
<p>The author(s) declare that financial support was received for the research and/or publication of this article. This study was supported by the Natural Science Foundation of Guangxi, China (2024GXNSFAA010318), the Key Research and Development Program of Guangxi, China (GuikeAB22035060), the Basic Research Fund of Guangxi Institute of Botany (GUI ZHIYE 23005) and the Fund of Guangxi Key Laboratory of Plant Conservation and Restoration Ecology in Karst Terrain (No.22-035-26).</p>
</sec>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s10" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declare that no Generative AI was used in the creation of this manuscript.</p>
</sec>
<sec id="s11" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Amirruddin</surname> <given-names>A. D.</given-names>
</name>
<name>
<surname>Muharam</surname> <given-names>F. M.</given-names>
</name>
<name>
<surname>Ismail</surname> <given-names>M. H.</given-names>
</name>
<name>
<surname>Tan</surname> <given-names>N. P.</given-names>
</name>
<name>
<surname>Ismail</surname> <given-names>M. F.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Hyperspectral spectroscopy and imbalance data approaches for classification of oil palm&#x2019;s macronutrients observed from frond 9 and 17</article-title>. <source>Comput. Electron. Agric.</source> <volume>178</volume>, <fpage>105768</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2020.105768</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<collab>Anon</collab>
</person-group> (<year>2020</year>). <article-title>A possible fractional order derivative and optimized spectral indices for assessing total nitrogen content in cotton</article-title>. <source>Comput. Electron. Agric.</source> <volume>171</volume>, <fpage>105275</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2020.105275</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ansch&#xfc;tz</surname> <given-names>U.</given-names>
</name>
<name>
<surname>Becker</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Shabala</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Going beyond nutrition: Regulation of potassium homoeostasis as a common denominator of plant adaptive responses to environment</article-title>. <source>J. Plant Physiol.</source> <volume>171</volume>, <fpage>670</fpage>&#x2013;<lpage>687</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jplph.2014.01.009</pub-id>
</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Atzberger</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Gu&#xe9;rif</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Baret</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Werner</surname> <given-names>W.</given-names>
</name>
</person-group> (<year>2010</year>). <article-title>Comparative analysis of three chemometric techniques for the spectroradiometric assessment of canopy chlorophyll content in winter wheat</article-title>. <source>Comput. Electron. Agric.</source> <volume>73</volume>, <fpage>165</fpage>&#x2013;<lpage>173</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2010.05.006</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Azadnia</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Rajabipour</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Jamshidi</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Omid</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>New approach for rapid estimation of leaf nitrogen, phosphorus, and potassium contents in apple-trees using Vis/NIR spectroscopy based on wavelength selection coupled with machine learning</article-title>. <source>Comput. Electron. Agric.</source> <volume>207</volume>, <fpage>107746</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.107746</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Barbosa</surname> <given-names>G. N. N.</given-names>
</name>
<name>
<surname>Andreoni</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Mattos</surname> <given-names>D. M. F.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Optimizing feature selection in intrusion detection systems: Pareto dominance set approaches with mutual information and linear correlation</article-title>. <source>Ad Hoc Networks</source> <volume>159</volume>, <fpage>103485</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.adhoc.2024.103485</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Benkhettou</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Brito da Cruz</surname> <given-names>A. M. C.</given-names>
</name>
<name>
<surname>Torres</surname> <given-names>D. F. M.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>A fractional calculus on arbitrary time scales: Fractional differentiation and fractional integration</article-title>. <source>Signal Process.</source> <volume>107</volume>, <fpage>230</fpage>&#x2013;<lpage>237</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.sigpro.2014.05.026</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bent&#xe9;jac</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Cs&#xf6;rg&#x151;</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Mart&#xed;nez-Mu&#xf1;oz</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>A comparative analysis of gradient boosting algorithms</article-title>. <source>Artif. Intell. Rev.</source> <volume>54</volume>, <fpage>1937</fpage>&#x2013;<lpage>1967</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s10462-020-09896-5</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Berger</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Verrelst</surname> <given-names>J.</given-names>
</name>
<name>
<surname>F&#xe9;ret</surname> <given-names>J. B.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Wocher</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Strathmann</surname> <given-names>M.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Crop nitrogen monitoring: Recent progress and principal developments in the context of imaging spectroscopy missions</article-title>. <source>Remote Sens. Environ.</source> <volume>242</volume>, <fpage>111758</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2020.111758</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bischl</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Binder</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Lang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Pielok</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Richter</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Coors</surname> <given-names>S.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Hyperparameter optimization: Foundations, algorithms, best practices, and open challenges</article-title>. <source>WIREs Data Min. Knowledge Discov.</source> <volume>13</volume>, <elocation-id>e1484</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/widm.v13.2</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Breiman</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2001</year>). <article-title>Random forests</article-title>. <source>Mach. Learn.</source> <volume>45</volume>, <fpage>5</fpage>&#x2013;<lpage>32</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1023/A:1010933404324</pub-id>
</citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cao</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Gao</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Hyperspectral inversion of nitrogen content in maize leaves based on different dimensionality reduction algorithms</article-title>. <source>Comput. Electron. Agric.</source> <volume>190</volume>, <fpage>106461</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2021.106461</pub-id>
</citation>
</ref>
<ref id="B13">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Chen</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Guestrin</surname> <given-names>C.</given-names>
</name>
</person-group> (<year>2016</year>). &#x201c;<article-title>XGBoost: A scalable tree boosting system</article-title>,&#x201d; in <conf-name>Proceedings of the 22nd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining</conf-name>, <conf-loc>San Francisco, California, USA</conf-loc>: <publisher-name>Association for Computing Machinery</publisher-name>. <fpage>785</fpage>&#x2013;<lpage>794</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1145/2939672.2939785</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>D&#x2019;Ettorre</surname> <given-names>U. S.</given-names>
</name>
<name>
<surname>Liso</surname> <given-names>I. S.</given-names>
</name>
<name>
<surname>Parise</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Desertification in karst areas: A review</article-title>. <source>Earth-Science Rev.</source> <volume>253</volume>, <fpage>104786</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.earscirev.2024.104786</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Doktor</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Lausch</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Spengler</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Thurner</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Extraction of plant physiological status from hyperspectral signatures using machine learning methods</article-title>. <source>Remote Sens.</source> <volume>6</volume>, <fpage>12247</fpage>&#x2013;<lpage>12274</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs61212247</pub-id>
</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Dos Santos</surname> <given-names>G. L. A. A.</given-names>
</name>
<name>
<surname>Reis</surname> <given-names>A. S.</given-names>
</name>
<name>
<surname>Besen</surname> <given-names>M. R.</given-names>
</name>
<name>
<surname>Furlanetto</surname> <given-names>R. H.</given-names>
</name>
<name>
<surname>Rodrigues</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Crusiol</surname> <given-names>L. G. T.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Spectral method for macro and micronutrient prediction in soybean leaves using interval partial least squares regression</article-title>. <source>Eur. J. Agron.</source> <volume>143</volume>, <fpage>126717</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.eja.2022.126717</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Du</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Lu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Xiang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Tang</surname> <given-names>Z.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Estimation of winter canola growth parameter from UAV multi-angular spectral-texture information using stacking-based ensemble learning model</article-title>. <source>Comput. Electron. Agric.</source> <volume>222</volume>, <fpage>109074</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.109074</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ehteram</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Ahmed</surname> <given-names>A. N.</given-names>
</name>
<name>
<surname>Ling</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Fai</surname> <given-names>C. M.</given-names>
</name>
<name>
<surname>Latif</surname> <given-names>S. D.</given-names>
</name>
<name>
<surname>Afan</surname> <given-names>H. A.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Pipeline scour rates prediction-based model utilizing a multilayer perceptron-colliding body algorithm</article-title>. <source>Water</source> <volume>12</volume>, <fpage>902</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/w12030902</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>F&#xe9;ret</surname> <given-names>J. B.</given-names>
</name>
<name>
<surname>Le Maire</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Jay</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Berveiller</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Bendoula</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Hmimina</surname> <given-names>G.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Estimating leaf mass per area and equivalent water thickness based on leaf optical properties: Potential and limitations of physical modeling and machine learning</article-title>. <source>Remote Sens. Environ.</source> <volume>231</volume>, <fpage>110959</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2018.11.002</pub-id>
</citation>
</ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Flynn</surname> <given-names>K. C.</given-names>
</name>
<name>
<surname>Baath</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Lee</surname> <given-names>T. O.</given-names>
</name>
<name>
<surname>Gowda</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Northup</surname> <given-names>B.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Hyperspectral reflectance and machine learning to monitor legume biomass and nitrogen accumulation</article-title>. <source>Comput. Electron. Agric.</source> <volume>211</volume>, <fpage>107991</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.107991</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Furlanetto</surname> <given-names>R. H.</given-names>
</name>
<name>
<surname>Crusiol</surname> <given-names>L. G. T.</given-names>
</name>
<name>
<surname>Gon&#xe7;alves</surname> <given-names>J. V. F.</given-names>
</name>
<name>
<surname>Nanni</surname> <given-names>M. R.</given-names>
</name>
<name>
<surname>de Oliveira Junior</surname> <given-names>A.</given-names>
</name>
<name>
<surname>de Oliveira</surname> <given-names>F. A.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Machine learning as a tool to predict potassium concentration in soybean leaf using hyperspectral data</article-title>. <source>Precis. Agric.</source> <volume>24</volume>, <fpage>2264</fpage>&#x2013;<lpage>2292</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11119-023-10040-w</pub-id>
</citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Furlanetto</surname> <given-names>R. H.</given-names>
</name>
<name>
<surname>Crusiol</surname> <given-names>L. G. T.</given-names>
</name>
<name>
<surname>Nanni</surname> <given-names>M. R.</given-names>
</name>
<name>
<surname>de Oliveira Junior</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Sibaldelli</surname> <given-names>R. N. R.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Hyperspectral data for early identification and classification of potassium deficiency in soybean plants (Glycine max (L.) merrill)</article-title>. <source>Remote Sens.</source> <volume>16</volume>, <fpage>1900</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs16111900</pub-id>
</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gardner</surname> <given-names>M. W.</given-names>
</name>
<name>
<surname>Dorling</surname> <given-names>S. R.</given-names>
</name>
</person-group> (<year>1998</year>). <article-title>Artificial neural networks (the multilayer perceptron)&#x2014;a review of applications in the atmospheric sciences</article-title>. <source>Atmospheric Environ.</source> <volume>32</volume>, <fpage>2627</fpage>&#x2013;<lpage>2636</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/S1352-2310(97)00447-0</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ge</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Ding</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Teng</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Xie</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Exploring the capability of Gaofen-5 hyperspectral data for assessing soil salinity risks</article-title>. <source>Int. J. Appl. Earth Observation Geoinformation</source> <volume>112</volume>, <fpage>102969</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2022.102969</pub-id>
</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hasanuzzaman</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Bhuyan</surname> <given-names>M. H. M. B.</given-names>
</name>
<name>
<surname>Nahar</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Hossain</surname> <given-names>M. S.</given-names>
</name>
<name>
<surname>Mahmud</surname> <given-names>J. A.</given-names>
</name>
<name>
<surname>Hossen</surname> <given-names>M. S.</given-names>
</name>
<etal/>
</person-group>. (<year>2018</year>). <article-title>Potassium: A vital regulator of plant responses and tolerance to abiotic stresses</article-title>. <source>Agronomy</source> <volume>8</volume>, <fpage>31</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy8030031</pub-id>
</citation>
</ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>He</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Yao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Gu</surname> <given-names>D.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Using field spectroradiometer to estimate the leaf N/P ratio of mixed forest in a karst area of southern China: A combined model to overcome overfitting</article-title>. <source>Remote Sens.</source> <volume>13</volume>, <fpage>3368</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs13173368</pub-id>
</citation>
</ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hong</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Application of fractional-order derivative in the quantitative estimation of soil organic matter content through visible and near-infrared spectroscopy</article-title>. <source>Geoderma</source> <volume>337</volume>, <fpage>758</fpage>&#x2013;<lpage>769</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.geoderma.2018.10.025</pub-id>
</citation>
</ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hu</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Tang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>C.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Handling the challenges of small-scale labeled data and class imbalances in classifying the N and K statuses of rubber leaves using hyperspectroscopy techniques</article-title>. <source>Plant Phenomics</source> <volume>6</volume>, <fpage>0154</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.34133/plantphenomics.0154</pub-id>
</citation>
</ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jay</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Maupas</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Bendoula</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Gorretta</surname> <given-names>N.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Retrieving LAI, chlorophyll and nitrogen contents in sugar beet crops from multi-angular optical remote sensing: Comparison of vegetation indices and PROSAIL inversion for field phenotyping</article-title>. <source>Field Crops Res.</source> <volume>210</volume>, <fpage>33</fpage>&#x2013;<lpage>46</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.fcr.2017.05.005</pub-id>
</citation>
</ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jiang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Lian</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Qin</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Rocky desertification in Southwest China: Impacts, causes, and restoration</article-title>. <source>Earth-Science Rev.</source> <volume>132</volume>, <fpage>1</fpage>&#x2013;<lpage>12</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.earscirev.2014.01.005</pub-id>
</citation>
</ref>
<ref id="B31">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jin</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Q.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Hyperspectral indices developed from the low order fractional derivative spectra can capture leaf dry matter content across a variety of species better</article-title>. <source>Agric. For. Meteorology</source> <volume>322</volume>, <fpage>109007</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.agrformet.2022.109007</pub-id>
</citation>
</ref>
<ref id="B32">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kong</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Geng</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>Z.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Pixel-level assessment model of contamination conditions of composite insulators based on hyperspectral imaging technology and a semi-supervised ladder network</article-title>. <source>IEEE Trans. Dielectrics Electrical Insulation</source> <volume>30</volume>, <fpage>326</fpage>&#x2013;<lpage>335</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TDEI.2022.3226164</pub-id>
</citation>
</ref>
<ref id="B33">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Ma</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Kong</surname> <given-names>D.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>Rapid detection of fertilizer information based on Raman spectroscopy and machine learning</article-title>. <source>Spectrochimica Acta Part A: Mol. Biomolecular Spectrosc.</source> <volume>324</volume>, <fpage>124985</fpage>.</citation>
</ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Xiang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Tang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>X.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Estimation of soil moisture content based on fractional differential and optimal spectral index</article-title>. <source>Agronomy</source> <volume>14</volume>, <fpage>184</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14010184</pub-id>
</citation>
</ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Lei</surname> <given-names>J.</given-names>
</name>
<name>
<surname>She</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Estimation of leaf water content from hyperspectral data of different plant species by using three new spectral absorption indices</article-title>. <source>PloS One</source> <volume>16</volume>, <elocation-id>e0249351</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0249351</pub-id>
</citation>
</ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lin</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Qiao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Qin</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Miao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Sheng</surname> <given-names>K.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>A study on an accurate modeling for distinguishing nitrogen, phosphorous and potassium status in summer maize using <italic>in situ</italic> canopy hyperspectral data</article-title>. <source>Comput. Electron. Agric.</source> <volume>221</volume>, <fpage>108989</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.108989</pub-id>
</citation>
</ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Ning</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Cai</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Multiscale dense cross-attention mechanism with covariance pooling for hyperspectral image scene classification</article-title>. <source>Mobile Inf. Syst.</source> <volume>2021</volume>, <fpage>9962057</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1155/2021/9962057</pub-id>
</citation>
</ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Zareef</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Ouyang</surname> <given-names>Q.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Monitoring chlorophyll changes during Tencha processing using portable near-infrared spectroscopy</article-title>. <source>Food Chem.</source> <volume>412</volume>, <fpage>135505</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.foodchem.2023.135505</pub-id>
</citation>
</ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Su</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Qi</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Yao</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Cheng</surname> <given-names>T.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Monitoring leaf potassium content using hyperspectral vegetation indices in rice leaves</article-title>. <source>Precis. Agric.</source> <volume>21</volume>, <fpage>324</fpage>&#x2013;<lpage>348</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11119-019-09670-w</pub-id>
</citation>
</ref>
<ref id="B40">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lyu</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Grafton</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Ramilan</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Irwin</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Sandoval</surname> <given-names>E.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Assessing the leaf blade nutrient status of pinot noir using hyperspectral reflectance and machine learning models</article-title>. <source>Remote Sens.</source> <volume>15</volume>, <fpage>1497</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs15061497</pub-id>
</citation>
</ref>
<ref id="B41">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mahajan</surname> <given-names>G. R.</given-names>
</name>
<name>
<surname>Das</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Kumar</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Murgaokar</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Patel</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Desai</surname> <given-names>A.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Spectroscopy-based chemometrics combined machine learning modeling predicts cashew foliar macro- and micronutrients</article-title>. <source>Spectrochimica Acta Part A: Mol. Biomolecular Spectrosc.</source> <volume>320</volume>, <fpage>124639</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.saa.2024.124639</pub-id>
</citation>
</ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ni</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Xue</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Miao</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Rapid identification of greenhouse tomato senescent leaves based on the sucrose-spectral quantitative prediction model</article-title>. <source>Biosyst. Eng.</source> <volume>238</volume>, <fpage>200</fpage>&#x2013;<lpage>211</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2024.01.013</pub-id>
</citation>
</ref>
<ref id="B43">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Nieves-Cordones</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Alem&#xe1;n</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Mart&#xed;nez</surname> <given-names>V.</given-names>
</name>
<name>
<surname>Rubio</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>K+ uptake in plant roots. The systems involved, their regulation and parallels in other organisms</article-title>. <source>J. Plant Physiol.</source> <volume>171</volume>, <fpage>688</fpage>&#x2013;<lpage>695</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jplph.2013.09.021</pub-id>
</citation>
</ref>
<ref id="B44">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Oliveri</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Malegori</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Simonetti</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Casale</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>The impact of signal pre-processing on the final interpretation of analytical outcomes &#x2013; A tutorial</article-title>. <source>Analytica Chimica Acta</source> <volume>1058</volume>, <fpage>9</fpage>&#x2013;<lpage>17</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.aca.2018.10.055</pub-id>
</citation>
</ref>
<ref id="B45">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pimstein</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Karnieli</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Bansal</surname> <given-names>S. K.</given-names>
</name>
<name>
<surname>Bonfil</surname> <given-names>D. J.</given-names>
</name>
</person-group> (<year>2011</year>). <article-title>Exploring remotely sensed technologies for monitoring wheat potassium and phosphorus using field spectroscopy</article-title>. <source>Field Crops Res.</source> <volume>121</volume>, <fpage>125</fpage>&#x2013;<lpage>135</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.fcr.2010.12.001</pub-id>
</citation>
</ref>
<ref id="B46">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Jia</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2008</year>). <article-title>Fractional differential approach to detecting textural features of digital image and its fractional differential filter implementation</article-title>. <source>Sci. China Ser. F: Inf. Sci.</source> <volume>51</volume>, <fpage>1319</fpage>&#x2013;<lpage>1339</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11432-008-0098-x</pub-id>
</citation>
</ref>
<ref id="B47">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Quan</surname> <given-names>S. J.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Comparing hyperparameter tuning methods in machine learning based urban building energy modeling: A study in Chicago</article-title>. <source>Energy Buildings</source> <volume>317</volume>, <fpage>114353</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.enbuild.2024.114353</pub-id>
</citation>
</ref>
<ref id="B48">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Reddy</surname> <given-names>D. D.</given-names>
</name>
<name>
<surname>Veeranki</surname> <given-names>K.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Simple and inexpensive water extraction method for assaying potassium concentration in tobacco plant tissue</article-title>. <source>Commun. Soil Sci. Plant Anal.</source> <volume>44</volume>, <fpage>962</fpage>&#x2013;<lpage>970</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/00103624.2012.747603</pub-id>
</citation>
</ref>
<ref id="B49">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Scherer</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Kalla</surname> <given-names>S. L.</given-names>
</name>
<name>
<surname>Tang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2011</year>). <article-title>The Gr&#xfc;nwald&#x2013;Letnikov method for fractional differential equations</article-title>. <source>Comput. Mathematics Appl.</source> <volume>62</volume>, <fpage>902</fpage>&#x2013;<lpage>917</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.camwa.2011.03.054</pub-id>
</citation>
</ref>
<ref id="B50">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shah</surname> <given-names>S. H.</given-names>
</name>
<name>
<surname>Angel</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Houborg</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Ali</surname> <given-names>S.</given-names>
</name>
<name>
<surname>McCabe</surname> <given-names>M. F.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>A random forest machine learning approach for the retrieval of leaf chlorophyll content in wheat</article-title>. <source>Remote Sens.</source> <volume>11</volume>, <fpage>920</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs11080920</pub-id>
</citation>
</ref>
<ref id="B51">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shen</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Gao</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Yan</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Z. L.</given-names>
</name>
<name>
<surname>Leng</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Q.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Hyperspectral estimation of soil organic matter content using different spectral preprocessing techniques and PLSR method</article-title>. <source>Remote Sens.</source> <volume>12</volume>, <fpage>1206</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs12071206</pub-id>
</citation>
</ref>
<ref id="B52">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Song</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Jin</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Fractional-order derivative spectral transformations improved partial least squares regression estimation of photosynthetic capacity from hyperspectral reflectance</article-title>. <source>IEEE Trans. Geosci. Remote Sens.</source> <volume>61</volume>, <fpage>1</fpage>&#x2013;<lpage>10</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TGRS.2023.3270892</pub-id>
</citation>
</ref>
<ref id="B53">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tan</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Ding</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Han</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Estimating soil salinity in mulched cotton fields using UAV-based hyperspectral remote sensing and a Seagull Optimization Algorithm-Enhanced Random Forest Model</article-title>. <source>Comput. Electron. Agric.</source> <volume>221</volume>, <fpage>109017</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.109017</pub-id>
</citation>
</ref>
<ref id="B54">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Huang</surname> <given-names>N. T.</given-names>
</name>
<name>
<surname>Hogg</surname> <given-names>D. W.</given-names>
</name>
<name>
<surname>Villar</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Dimensionality reduction, regularization, and generalization in overparameterized regressions</article-title>. <source>SIAM J. Mathematics Data Sci.</source> <volume>4</volume>, <fpage>126</fpage>&#x2013;<lpage>152</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1137/20M1387821</pub-id>
</citation>
</ref>
<ref id="B55">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Lu</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>W.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>A novel improved model for building energy consumption prediction based on model integration</article-title>. <source>Appl. Energy</source> <volume>262</volume>, <fpage>114561</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.apenergy.2020.114561</pub-id>
</citation>
</ref>
<ref id="B56">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Kung</surname> <given-names>H. T.</given-names>
</name>
<name>
<surname>Johnson</surname> <given-names>V. C.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>New methods for improving the remote sensing estimation of soil organic matter content (SOMC) in the Ebinur Lake Wetland National Nature Reserve (ELWNNR) in northwest China</article-title>. <source>Remote Sens. Environ.</source> <volume>218</volume>, <fpage>104</fpage>&#x2013;<lpage>118</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2018.09.020</pub-id>
</citation>
</ref>
<ref id="B57">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Chan</surname> <given-names>N. W.</given-names>
</name>
<name>
<surname>Kung</surname> <given-names>H. T.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>S.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Estimation of soil salt content using machine learning techniques based on remote-sensing fractional derivatives, a case study in the Ebinur Lake Wetland National Nature Reserve, Northwest China</article-title>. <source>Ecol. Indic.</source> <volume>119</volume>, <fpage>106869</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ecolind.2020.106869</pub-id>
</citation>
</ref>
<ref id="B58">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Zheng</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Shen</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>The critical role of potassium in plant stress response</article-title>. <source>Int. J. Mol. Sci.</source> <volume>14</volume>, <fpage>7370</fpage>&#x2013;<lpage>7390</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/ijms14047370</pub-id>
</citation>
</ref>
<ref id="B59">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xie</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>B.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Hyperspectral images denoising via nonconvex regularized low-rank and sparse matrix decomposition</article-title>. <source>IEEE Trans. Image Process.</source> <volume>29</volume>, <fpage>44</fpage>&#x2013;<lpage>56</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TIP.2019.2926332</pub-id>
</citation>
</ref>
<ref id="B60">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Song</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Xie</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Study on hyperspectral estimation model of soil organic carbon content in the wheat field under different water treatments</article-title>. <source>Sci. Rep.</source> <volume>11</volume>, <fpage>18582</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-021-98143-0</pub-id>
</citation>
</ref>
<ref id="B61">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yang</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Lu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Liao</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Qi</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Yao</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Cheng</surname> <given-names>T.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Retrieving potassium levels in wheat blades using normalised spectra</article-title>. <source>Int. J. Appl. Earth Observation Geoinformation</source> <volume>102</volume>, <fpage>102412</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2021.102412</pub-id>
</citation>
</ref>
<ref id="B62">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Shami</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2020</year>a). <article-title>On hyperparameter optimization of machine learning algorithms: Theory and practice</article-title>. <source>Neurocomputing</source> <volume>415</volume>, <fpage>295</fpage>&#x2013;<lpage>316</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.neucom.2020.07.061</pub-id>
</citation>
</ref>
<ref id="B63">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Xiong</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Du</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Piecewise preprocessing of near-infrared spectra for improving prediction ability of a PLS model</article-title>. <source>Infrared Phys. Technol.</source> <volume>126</volume>, <fpage>104359</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.infrared.2022.104359</pub-id>
</citation>
</ref>
<ref id="B64">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ying</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>An overview of overfitting and its solutions</article-title>. <source>J. Physics: Conf. Ser.</source> <volume>1168</volume>, <fpage>022022</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1088/1742-6596/1168/2/022022</pub-id>
</citation>
</ref>
<ref id="B65">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Sui</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Prediction of potassium content in rice leaves based on spectral features and random forests</article-title>. <source>Agronomy</source> <volume>13</volume>, <fpage>2337</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy13092337</pub-id>
</citation>
</ref>
<ref id="B66">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yue</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Jiao</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>B.</given-names>
</name>
<etal/>
</person-group>. (<year>2010</year>). <article-title>Exploring the relationship between vegetation spectra and eco-geo-environmental conditions in karst region, Southwest China</article-title>. <source>Environ. Monit. Assess.</source> <volume>160</volume>, <fpage>157</fpage>&#x2013;<lpage>168</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s10661-008-0665-z</pub-id>
</citation>
</ref>
<ref id="B67">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Bengio</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Hardt</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Recht</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Vinyals</surname> <given-names>O.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Understanding deep learning (still) requires rethinking generalization</article-title>. <source>Commun. ACM</source> <volume>64</volume>, <fpage>107</fpage>&#x2013;<lpage>115</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1145/3446776</pub-id>
</citation>
</ref>
<ref id="B68">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>F.</given-names>
</name>
<name>
<surname>He</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Gong</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Detecting macronutrients content and distribution in oilseed rape leaves based on hyperspectral imaging</article-title>. <source>Biosyst. Eng.</source> <volume>115</volume>, <fpage>56</fpage>&#x2013;<lpage>65</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2013.02.007</pub-id>
</citation>
</ref>
<ref id="B69">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Lv</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Dai</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Tang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>S.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Comparative study of the stoichiometric characteristics of karst and non-karst forests in Guizhou, China</article-title>. <source>J. Forestry Res.</source> <volume>30</volume>, <fpage>799</fpage>&#x2013;<lpage>806</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11676-018-0806-3</pub-id>
</citation>
</ref>
<ref id="B70">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhou</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Jing</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Estimation of cadmium content in Lactuca sativa L. leaves using visible&#x2013;near-infrared spectroscopy technology</article-title>. <source>Agronomy</source> <volume>14</volume>, <fpage>644</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14040644</pub-id>
</citation>
</ref>
<ref id="B71">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zununjan</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Turghan</surname> <given-names>M. A.</given-names>
</name>
<name>
<surname>Sattar</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Kasim</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Emin</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Abliz</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Combining the fractional order derivative and machine learning for leaf water content estimation of spring wheat using hyper-spectral indices</article-title>. <source>Plant Methods</source> <volume>20</volume>, <fpage>97</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13007-024-01224-0</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>