<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2025.1612430</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Plant Science</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Integrating PROSPECT-D physics and adversarial domain adaptation ResNet for robust cross-ecosystem plant trait estimation</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>Zhang</surname>
<given-names>Hui</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/3096051/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Su</surname>
<given-names>Haoxuan</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Shen</surname>
<given-names>Tie</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Sun</surname>
<given-names>Guangyao</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/3032392/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Wang</surname>
<given-names>Qi</given-names>
</name>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>School of Information, Guizhou University of Finance and Economics</institution>, <addr-line>Guiyang</addr-line>,&#xa0;<country>China</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Guizhou Provincial Leading Talent Workstation for Protein Design and Biological Imaging Innovation, Key Laboratory of National Forestry and Grassland Administration on Biodiversity Conservation in Karst Mountainous Areas of Southwestern China, College of Life Science, Guizhou Normal University</institution>, <addr-line>Guiyang</addr-line>,&#xa0;<country>China</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>College of Information and Electrical Engineering, China Agricultural University</institution>, <addr-line>Beijing</addr-line>,&#xa0;<country>China</country>
</aff>
<aff id="aff4">
<sup>4</sup>
<institution>State Key Laboratory of Public Big Data, College of Computer Science and Technology, Guizhou University</institution>, <addr-line>Guiyang</addr-line>,&#xa0;<country>China</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited by: Huajian Liu, University of Adelaide, Australia</p>
</fn>
<fn fn-type="edited-by">
<p>Reviewed by: Yonggui Xiao, Chinese Academy of Agricultural Sciences (CAAS), China</p>
<p>Ruiheng Zhang, Beijing Institute of Technology, China</p>
<p>Lang Qiao, University of Minnesota Twin Cities, United States</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Qi Wang, <email xlink:href="mailto:qiwang@gzu.edu.cn">qiwang@gzu.edu.cn</email>
</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>25</day>
<month>07</month>
<year>2025</year>
</pub-date>
<pub-date pub-type="collection">
<year>2025</year>
</pub-date>
<volume>16</volume>
<elocation-id>1612430</elocation-id>
<history>
<date date-type="received">
<day>15</day>
<month>04</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>03</day>
<month>07</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2025 Zhang, Su, Shen, Sun and Wang</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Zhang, Su, Shen, Sun and Wang</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>Plant functional traits, including chlorophyll content (CHL), equivalent water thickness (EWT), and leaf mass per area (LMA), are critical indicators for assessing ecosystem functioning, functional diversity, and their roles in the Earth system. Hyperspectral remote sensing serves as a pivotal tool for multi-trait mapping; however, existing methods exhibit limited generalizability across ecosystems, land cover types, and sensor modalities. Challenges such as data heterogeneity, domain shifts, and sparse <italic>in situ</italic> measurements further hinder model generalization. To address these limitations, this study developed PPADA-Net, a novel framework integrating PROSPECT-D radiative transfer modeling with adversarial domain adaptation for robust cross-ecosystem plant trait prediction. In a two-stage process, a residual network is pretrained on synthetic spectra from PROSPECT-D to capture biophysical links between leaf traits and spectral signatures, followed by adversarial learning to align source and target domain features, reducing domain shifts. The model&#x2019;s performance is validated on four public datasets and one field-measured dataset. PPADA-Net outperforms traditional partial least squares regression (PLSR) and purely data-driven models (e.g., ResNet), achieving mean R&#xb2; values of 0.72 (CHL), 0.77 (EWT), and 0.86 (LMA). Additionally, PPADA-Net demonstrates practical utility in a real-world farmland dataset (D5), achieving high-precision spatial mapping with an nRMSE of 0.07 for LMA. By merging physical priors with adaptive learning, PPADA-Net enhances spectral-trait modeling under data scarcity, offering a scalable tool for ecosystem monitoring, precision agriculture, and climate adaptation.</p>
</abstract>
<kwd-group>
<kwd>hyperspectral</kwd>
<kwd>deep learning</kwd>
<kwd>plant phenotyping</kwd>
<kwd>adversarial domain adaptation</kwd>
<kwd>plant functional traits</kwd>
</kwd-group>
<counts>
<fig-count count="12"/>
<table-count count="4"/>
<equation-count count="5"/>
<ref-count count="40"/>
<page-count count="19"/>
<word-count count="9931"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-in-acceptance</meta-name>
<meta-value>Technical Advances in Plant Science</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Global climate change and the sustainable management of ecosystems are major concerns for the international community. The 2015 Paris Agreement set a clear goal of limiting the global temperature increase to below 1.5&#xb0;C, identifying carbon neutrality as a crucial strategy to achieve this objective. In this context, accurately monitoring vegetation health and functional dynamics is critically important, as vegetation serves as a primary carbon sink in terrestrial ecosystems (<xref ref-type="bibr" rid="B10">Fatichi et&#xa0;al., 2019</xref>). Thus, there is an urgent need for efficient, reliable methods to quantify plant functional traits, supporting carbon sink assessments, vegetation restoration monitoring, and precision agriculture.</p>
<p>Plant functional traits are critical indicators for understanding ecosystem dynamics, vegetation health, and biogeochemical cycles (<xref ref-type="bibr" rid="B34">Wright et&#xa0;al., 2004</xref>). For example, chlorophyll content (CHL), which quantifies leaf chlorophyll pigment concentration, directly reflects photosynthetic capacity and nitrogen status, making it vital for assessing plant productivity and stress responses (<xref ref-type="bibr" rid="B24">Meloni et&#xa0;al., 2003</xref>). Equivalent water thickness (EWT), defined as the mass of water per unit leaf area, offers insights into plant water-use efficiency and drought resilience, thereby informing irrigation management and climate adaptation strategies (<xref ref-type="bibr" rid="B18">Hunt and Rock, 1989</xref>). Leaf mass per area (LMA), representing the ratio of leaf dry mass to area, correlates with leaf longevity, carbon allocation, and environmental stress resistance, serving as a key parameter for modeling carbon sequestration and ecosystem functions (<xref ref-type="bibr" rid="B27">Poorter et&#xa0;al., 2009</xref>). Collectively, these traits underpin efforts to monitor global vegetation changes, predict agricultural yields, and mitigate climate impacts (<xref ref-type="bibr" rid="B9">Drenovsky et&#xa0;al., 2012</xref>). However, traditional measurement methods such as destructive sampling and laboratory analysis are labor-intensive, time-consuming, and low throughput, limiting their applicability across large spatial and temporal scales (<xref ref-type="bibr" rid="B40">Zhang et&#xa0;al., 2025b</xref>). Consequently, there is an urgent need for non-destructive, high-efficiency approaches to estimate plant traits rapidly and accurately, enabling real-time decision-making in precision agriculture, ecological conservation, and climate resilience initiatives.</p>
<p>Hyperspectral remote sensing has emerged as a powerful tool for non-destructive and high-throughput estimation of plant traits (<xref ref-type="bibr" rid="B3">Angel and Shiklomanov, 2022</xref>), providing rich spectral information across hundreds of narrow bands to detect subtle biochemical and physiological variations (<xref ref-type="bibr" rid="B31">Sun et&#xa0;al., 2025</xref>). For instance, <xref ref-type="bibr" rid="B16">Hoeppner et&#xa0;al. (2020)</xref> leveraged hyperspectral data to predict CHL in forest ecosystems by analyzing reflectance features in the visible-red edge regions (680&#x2013;780 nm), achieving robust correlations with ground-truth measurements. Similarly, <xref ref-type="bibr" rid="B29">Shu et&#xa0;al. (2022)</xref> demonstrated the utility of hyperspectral images in estimating EWT for crops, enabling real-time drought monitoring in precision agriculture. These studies underscore hyperspectral imaging&#x2019;s capability to resolve trait-specific spectral signatures. However, existing approaches primarily focus on single ecosystem applications (forests or croplands), where models are trained and validated in homogeneous environments. This limits their generalizability to heterogeneous ecosystems, such as transitions from controlled agricultural fields to natural grasslands or mixed forests, where spectral-trait relationships may vary significantly due to differences in species composition, canopy structure, and environmental stressors (<xref ref-type="bibr" rid="B14">Heidenreich and Richardson, 2020</xref>). Consequently, improving model transferability across ecosystems remains a critical challenge, necessitating frameworks that address spectral heterogeneity and domain shifts inherent to multi-environment applications.</p>
<p>Radiative transfer models (RTMs), such as the widely adopted PROSPECT model, offer a mechanistic framework for simulating vegetation reflectance spectra based on biophysical and biochemical properties (<xref ref-type="bibr" rid="B13">Haboudane et&#xa0;al., 2004</xref>). PROSPECT has been extensively used to retrieve key plant traits, including CHL, EWT, and LMA, by inversely linking observed hyperspectral data to simulated canopy reflectance (<xref ref-type="bibr" rid="B19">Jacquemoud and Baret, 1990</xref>). For example, <xref ref-type="bibr" rid="B32">Wang et&#xa0;al. (2015)</xref> demonstrated PROSPECT&#x2019;s capability to estimate CHL, LMA and nitrogen content in leaves, leveraging its parameterization of leaf biochemistry and canopy architecture. This model enables researchers to disentangle complex interactions between light and vegetation, such as the influence of leaf dry matter on SWIR reflectance or water content on NIR absorption, thereby offering interpretable insights into spectral-trait relationships (<xref ref-type="bibr" rid="B5">Broge and Leblanc, 2001</xref>). However, PROSPECT&#x2019;s practical application faces inherent limitations, most notably the ill-posed inverse problem: multiple combinations of input parameters can generate nearly identical canopy reflectance spectra (<xref ref-type="bibr" rid="B20">Jing et&#xa0;al., 2004</xref>), leading to non-unique solutions and heightened uncertainty in trait retrieval. Additionally, the model&#x2019;s performance depends critically on accurate prior knowledge of species-specific parameters, which may vary significantly across ecosystems or under stress conditions. These challenges underscore the need for hybrid approaches that integrate physical models with data-driven techniques to enhance robustness and scalability in trait estimation.</p>
<p>Data-driven approaches, particularly machine learning (ML) and deep learning (DL), have gained prominence in plant trait estimation by leveraging spectral data to establish empirical relationships between reflectance and biochemical properties (<xref ref-type="bibr" rid="B30">Sun et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B39">Zhang et&#xa0;al., 2021</xref>). Traditional ML methods, such as partial least squares regression (PLSR) and random forest (RF), have been successfully used to predict traits like LMA by integrating spectral reflectance and vegetation indices. For instance, <xref ref-type="bibr" rid="B15">Helsen et&#xa0;al. (2021)</xref> explored the potential of hyperspectral leaf reflectance-based PLSR model to predict LMA and EWT at the intraspecific level for two herbs and two shrubs. Similarly, <xref ref-type="bibr" rid="B35">Yin et&#xa0;al. (2023)</xref> employed RF to map CHL in crops by combining multispectral features with environmental covariates, demonstrating the flexibility of ML in handling high-dimensional data. However, these models often fail when applied to novel environments, where trait-spectra relationships differ substantially. Deep learning offers a promising alternative by automating hierarchical feature extraction and capturing nonlinear interactions. For example, <xref ref-type="bibr" rid="B36">Yue et&#xa0;al. (2024)</xref> developed a convolutional neural network (CNN) to estimate leaf chlorophyll content using hyperspectral reflectance data, while the CNN excels at prediction in each individual growth stage. Despite these advances, DL models face two critical challenges: (1) training robust networks requires extensive field measurements, which are labor-intensive to collect for traits like LMA or EWT, and (2) spectral features extracted from one ecosystem may misalign with those from another, degrading prediction accuracy in cross-domain scenarios. 
These limitations underscore the need for adaptive frameworks that integrate data-driven learning with domain-invariant representations to enable reliable trait estimation across heterogeneous environments.</p>
<p>Transfer learning, a paradigm that leverages knowledge from source domains to improve model performance in target domains with limited labeled data, offers a transformative solution to address domain shifts in spectral-trait modeling (<xref ref-type="bibr" rid="B25">Pan and Yang, 2010</xref>). Traditional trait estimation models, including physically based radiative transfer models (RTMs) such as PROSPECT, and empirical data-driven approaches like PLSR and RF, have shown effectiveness within homogeneous ecosystems but face significant challenges in cross-ecosystem applications. RTMs suffer from the ill-posed inverse problem and rely heavily on accurate species-specific parameterization, which is often unavailable or inaccurate across diverse environments, resulting in ambiguous and uncertain trait retrieval. Similarly, data-driven models tend to perform poorly when spectral-trait relationships vary due to differences in species composition, canopy structure, and environmental conditions, limiting their generalizability and practical applicability across heterogeneous landscapes. By reusing pre-trained features or aligning feature distributions across domains, transfer learning reduces dependency on large target-domain datasets while enhancing generalization (<xref ref-type="bibr" rid="B28">Radford et&#xa0;al., 2021</xref>). For example, adversarial domain adaptation, a subfield of transfer learning, employs domain-discriminative networks to reduce discrepancies between source and target feature spaces. <xref ref-type="bibr" rid="B12">Ganin et&#xa0;al. (2016)</xref> demonstrated this approach&#x2019;s efficacy in computer vision through Domain-Adversarial Neural Networks (DANN), where adversarial training aligned image features across disparate datasets, achieving 20% higher accuracy in cross-domain tasks. In plant trait estimation, such techniques hold promise for bridging spectral heterogeneity across ecosystems. 
<xref ref-type="bibr" rid="B4">Bhadra et&#xa0;al. (2024)</xref> proposed a transfer learning approach combined with PROSAIL simulated data to achieve accurate prediction of CHL and average leaf angle based on UAV hyperspectral imagery. However, their application remains underexplored, particularly in scenarios where spectral signatures diverge due to variations in species composition, canopy structure, or environmental conditions. For example, while models trained on agricultural crop spectra may struggle to generalize to forest ecosystems, adversarial learning could theoretically align domain-invariant traits to mitigate such discrepancies (<xref ref-type="bibr" rid="B2">Amirkolaee et&#xa0;al., 2024</xref>).</p>
<p>Despite this potential, few studies have systematically evaluated transfer learning&#x2019;s capacity to enhance cross-ecosystem trait prediction, leaving critical gaps in understanding how to optimize domain adaptation for vegetation monitoring. Existing data-driven models often function as black boxes, providing limited interpretability and performing poorly under data scarcity, which restricts their applicability across ecosystems with varying species compositions, canopy structures, and environmental conditions. In contrast, physical models such as PROSPECT-D are grounded in well-established biophysical principles but suffer from issues of non-uniqueness and sensitivity to prior assumptions, particularly in heterogeneous landscapes. These complementary strengths and limitations suggest a promising direction: integrating physical modeling with adversarial domain adaptation. Specifically, pretraining on synthetic spectra&#x2013;trait pairs generated by PROSPECT-D introduces biophysical priors into the model, while adversarial learning facilitates the alignment of cross-domain representations, thereby enhancing generalization under spectral heterogeneity. In summary, the primary objectives of this study are: (1) to evaluate the effectiveness of integrating data-driven models and physical models for plant trait prediction using hyperspectral data; (2) to develop a transfer learning framework based on adversarial domain adaptation to improve model generalization and transferability across heterogeneous environmental conditions; (3) to validate the proposed models and methodologies on real-world crop datasets, assessing their practical applicability and robustness across diverse field conditions.</p>
</sec>
<sec id="s2" sec-type="materials|methods">
<label>2</label>
<title>Materials and methods</title>
<sec id="s2_1">
<label>2.1</label>
<title>Dataset collection</title>
<p>This study employs five independent datasets (<xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref>) from distinct ecosystems, each containing measurements of CHL, EWT, LMA, and corresponding leaf reflectance spectra. All spectral data were acquired using a high-resolution spectroradiometer (1 nm resolution). The LMA, CHL, and EWT values in each dataset were determined following standardized protocols, with LMA calculated as the ratio of leaf dry mass to projected area. Dataset 1 (D1) comprises data from five forest plant species predominantly distributed in temperate regions. Dataset 2 (D2) contains six tropical plant species spanning subtropical and tropical ecosystems. Dataset 3 (D3) includes two herbaceous species cultivated under controlled laboratory conditions. Dataset 4 (D4) incorporates two xerophytic plant species adapted to arid environments with water-limited growth conditions. These four datasets (D1&#x2013;D4) are publicly available and can be accessed through the EcoSIS (Ecological Spectral Information System) platform at <ext-link ext-link-type="uri" xlink:href="https://ecosis.org">https://ecosis.org</ext-link>.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Statistical description of the dataset.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Dataset</th>
<th valign="middle" align="center">Instrument</th>
<th valign="middle" align="center">Spectral range</th>
<th valign="middle" align="center">No. species</th>
<th valign="middle" align="center">Number of samples</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">D1</td>
<td valign="middle" align="center">ASD FieldSpec3</td>
<td valign="middle" align="center">350-2500</td>
<td valign="middle" align="center">5</td>
<td valign="middle" align="center">212</td>
</tr>
<tr>
<td valign="middle" align="center">D2</td>
<td valign="middle" align="center">Perkin Elmer Lambda-19</td>
<td valign="middle" align="center">400-2400</td>
<td valign="middle" align="center">6</td>
<td valign="middle" align="center">356</td>
</tr>
<tr>
<td valign="middle" align="center">D3</td>
<td valign="middle" align="center">SVC HR 1024i</td>
<td valign="middle" align="center">350-2500</td>
<td valign="middle" align="center">2</td>
<td valign="middle" align="center">178</td>
</tr>
<tr>
<td valign="middle" align="center">D4</td>
<td valign="middle" align="center">ASD FieldSpec</td>
<td valign="middle" align="center">400-2450</td>
<td valign="middle" align="center">2</td>
<td valign="middle" align="center">251</td>
</tr>
<tr>
<td valign="middle" align="center">D5 (ours)</td>
<td valign="middle" align="center">ASD FieldSpec3</td>
<td valign="middle" align="center">350-2500</td>
<td valign="middle" align="center">3</td>
<td valign="middle" align="center">490</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Dataset 5 (D5) was collected from an agricultural research station in Xinxiang, Henan Province, China (113&#xb0;45&#x2032;40&#x2033;E, 35&#xb0;8&#x2032;11&#x2033;N) (<xref ref-type="fig" rid="f1">
<bold>Figure 1a</bold>
</xref>), and comprises three crop species: potato, soybean, and maize. Among these, soybean and maize were part of breeding trials and were planted in separate plots, with each plot corresponding to a unique cultivar, including 36 cultivars for maize and 151 for soybean (<xref ref-type="fig" rid="f1"><bold>Figure 1b</bold></xref>). Notably, maize was sown in two separate batches, 28 days apart, resulting in distinct growth stages between the two groups of maize plots (<xref ref-type="fig" rid="f1"><bold>Figure 1d</bold></xref>). All crops were managed according to local agricultural practices, with optimal fertilization, pest control, and field maintenance applied. Spectral measurements were conducted on fresh leaves using standardized protocols to ensure data quality and comparability.</p>
<p>Spectral measurements were conducted using an ASD FieldSpec3 spectroradiometer (Analytical Spectral Devices Inc., Boulder, CO, USA) (<xref ref-type="fig" rid="f1"><bold>Figure 1c</bold></xref>), which operates over a spectral range of 350&#x2013;2500 nm with a 1 nm sampling interval. Prior to data collection, the instrument was calibrated with a Spectralon white reference panel to ensure accurate reflectance measurements. All measurements were performed between 10:00 AM and 2:00 PM local time under clear-sky conditions to minimize the effects of solar angles and atmospheric variability. For each crop species, fully expanded, healthy leaves were selected from the upper canopy to maintain consistency and physiological relevance. The adaxial surface of each leaf was positioned perpendicular to the optical fiber probe, and a leaf clip equipped with an internal light source was used to provide stable illumination and reduce ambient light interference. For each leaf, three replicate spectral measurements were acquired and averaged to reduce noise and enhance signal reliability.</p>
<p>CHL was determined using a solvent extraction method with 95% ethanol, and pigment concentrations were quantified spectrophotometrically based on absorbance at 649 nm and 665 nm, following established protocols. Leaf fresh weight and area were measured using an electronic balance and a leaf area meter, respectively. Samples were then oven-dried at 65&#xb0;C for 48 hours to determine dry mass. LMA was calculated as the ratio of dry mass to leaf area, while EWT was computed as the difference between fresh and dry mass, normalized by leaf area. A total of 490 leaf samples were collected for analysis, and the proportion of samples across different crops is shown in <xref ref-type="fig" rid="f1">
<bold>Figure&#xa0;1b</bold>
</xref>.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>
<bold>(a)</bold> Experiment location of this study, <bold>(b)</bold> proportion of three crop samples measured on site, <bold>(c)</bold> spectral acquisition instrument, <bold>(d)</bold> field images of three crops.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g001.tif">
<alt-text content-type="machine-generated">Map showing an experimental field highlighted in purple on a green background labeled with coordinates (a). A pie chart (b) displays crop distribution: potato 21.99%, soybean 30.96%, and maize 47.05%. Device labeled &#x201c;ASD FieldSpec3&#x201d; (c) and soil images of young maize plants. Three panels (d) depict fields with potato, soybean, and maize in varying growth stages.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>Statistical description of plant traits</title>
<p>The distribution patterns of CHL, EWT, and LMA are illustrated in <xref ref-type="fig" rid="f2">
<bold>Figure&#xa0;2</bold>
</xref>. CHL exhibited significant variation among the datasets, with the highest mean value observed in D5 and the lowest in D4. A similar pattern was observed for EWT, where D5 showed the highest values, while D1 and D4 had comparatively lower values. In contrast, LMA exhibited a distinct distribution trend, with D4 showing the highest values and D3 the lowest, indicating substantial variability in LMA across datasets. Additionally, the correlation coefficients among the traits differ: EWT and LMA exhibit a strong positive correlation (0.66), CHL and LMA demonstrate a moderate negative correlation (-0.31), and CHL and EWT show a relatively weak negative correlation (-0.15).</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>
<bold>(a-c)</bold> Distribution of chlorophyll content (CHL), equivalent water thickness (EWT) and leaf mass per area (LMA) for each dataset, <bold>(d)</bold> Pearson correlation coefficient between traits.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g002.tif">
<alt-text content-type="machine-generated">Four panels display various data visualizations. Panel (a) shows box plots of CHL concentration across five groups (D1 to D5), with variability in distribution. Panel (b) presents box plots of EWT for the same groups, also varying. Panel (c) illustrates box plots of LMA across the groups, with wide distribution ranges. Panel (d) displays a correlation heatmap between CHL, EWT, and LMA, showing correlation values, with a color gradient indicating strength and direction of correlations.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>Radiative transfer model</title>
<p>This study proposes a PROSPECT Pre-training Adversarial Domain Adaptation Network (PPADA-Net), which employs a two-stage training strategy for plant trait prediction (<xref ref-type="fig" rid="f3">
<bold>Figure&#xa0;3</bold>
</xref>). The core idea is to fully leverage large-scale simulated spectral data generated by the PROSPECT physical model for pretraining, followed by adaptive fine-tuning across domains using a limited number of target-domain samples.</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>PPADA-Net framework for plant traits prediction. The pre-trained encoder in Phase 1 undergoes supervised fine-tuning in Phase 2. MLP, Multilayer Perceptron; MSE loss, Mean Squared Error loss; DA loss, Domain Adaptation loss; ACL, Adversarial Contrastive Loss.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g003.tif">
<alt-text content-type="machine-generated">Flowchart illustrating a two-phase neural network training process. Phase 1 involves pre-training with PROSPECT simulation data, using features like leaf reflectance and solar incidence, processed through a Resnet Encoder. Phase 2 involves supervised training with domain-adversarial neural networks using source and target domain data, incorporating components like MLP and HCDFAs. The HCDFAs head includes a Contrastive Attention Module for aligning feature maps and calculating cosine similarity to enhance cross-domain feature alignment. The goal is to increase similarity between domain representations.</alt-text>
</graphic>
</fig>
<p>The PROSPECT radiative transfer model (version PROSPECT-D) has been employed to generate high-fidelity leaf spectral simulation data (<xref ref-type="bibr" rid="B11">Feret et&#xa0;al., 2017</xref>). Based on physical optics principles, the PROSPECT model simulates leaf reflectance and transmittance spectra across 400&#x2013;2400 nm by coupling anatomical structure parameters with biochemical parameters. Through radiative transfer equations in layered media, this model quantitatively characterizes multiple scattering and absorption effects within leaves, effectively capturing the nonlinear influence of various plant traits on spectral responses. A full-spectrum simulation dataset was established using the PROSPECT-D model to generate 20,000 synthetic spectral-trait pairs for pre-training. The model simulates leaf reflectance and transmittance across 400&#x2013;2400 nm at 1 nm resolution, based on biophysical parameters: CHL (0.1&#x2013;100 &#xb5;g/cm&#xb2;), leaf water depth (0.01&#x2013;0.05 cm), and dry matter content (0.004&#x2013;0.009 g/cm&#xb2;), with additional modulators like total anthocyanin content (1.2-1.8) and leaf structure index (1.0&#x2013;1.9). Leaf water depth and dry matter content can be converted to EWT and LMA, respectively, enabling direct mapping between physical parameters and functional traits. A Latin hypercube sampling strategy ensured uniform parameter sampling, covering biologically plausible ranges (<xref ref-type="table" rid="T2">
<bold>Table&#xa0;2</bold>
</xref>). Each sample comprises a reflectance spectrum (400&#x2013;2400 nm) paired with corresponding CHL, EWT, and LMA values, mimicking diverse ecosystem conditions.</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Overview of the PROSPECT-D input variables of plants.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">Variable Name</th>
<th valign="middle" align="center">Symbol</th>
<th valign="middle" align="center">Unit</th>
<th valign="middle" align="center">Typical Range</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">Leaf structure index</td>
<td valign="middle" align="center">
<italic>N</italic>
</td>
<td valign="middle" align="center">Unitless</td>
<td valign="middle" align="center">1.0-1.9</td>
</tr>
<tr>
<td valign="middle" align="left">Chlorophyll <inline-formula>
<mml:math display="inline" id="im1">
<mml:mrow>
<mml:mi>a</mml:mi>
<mml:mo>+</mml:mo>
<mml:mtext>b</mml:mtext>
</mml:mrow>
</mml:math>
</inline-formula> content</td>
<td valign="middle" align="center">
<inline-formula>
<mml:math display="inline" id="im2">
<mml:mrow>
<mml:msub>
<mml:mi>C</mml:mi>
<mml:mrow>
<mml:mi>a</mml:mi>
<mml:mi>b</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mo stretchy="false">/</mml:mo>
<mml:mi>L</mml:mi>
<mml:mi>C</mml:mi>
<mml:mi>C</mml:mi>
</mml:mrow>
</mml:math>
</inline-formula>
</td>
<td valign="middle" align="center">
<inline-formula>
<mml:math display="inline" id="im3">
<mml:mrow>
<mml:mi>&#x3bc;</mml:mi>
<mml:mtext>g</mml:mtext>
<mml:mo stretchy="false">/</mml:mo>
<mml:msup>
<mml:mrow>
<mml:mtext>cm</mml:mtext>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:math>
</inline-formula>
</td>
<td valign="middle" align="center">0.1-100</td>
</tr>
<tr>
<td valign="middle" align="left">Total carotenoid content</td>
<td valign="middle" align="center">
<inline-formula>
<mml:math display="inline" id="im4">
<mml:mrow>
<mml:msub>
<mml:mi>C</mml:mi>
<mml:mrow>
<mml:mi>c</mml:mi>
<mml:mi>x</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>
</td>
<td valign="middle" align="center">
<inline-formula>
<mml:math display="inline" id="im5">
<mml:mrow>
<mml:mi>&#x3bc;</mml:mi>
<mml:mtext>g</mml:mtext>
<mml:mo stretchy="false">/</mml:mo>
<mml:msup>
<mml:mrow>
<mml:mtext>cm</mml:mtext>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:math>
</inline-formula>
</td>
<td valign="middle" align="center">1.0-25.0</td>
</tr>
<tr>
<td valign="middle" align="left">Total anthocyanin content</td>
<td valign="middle" align="center">
<inline-formula>
<mml:math display="inline" id="im6">
<mml:mrow>
<mml:msub>
<mml:mi>C</mml:mi>
<mml:mrow>
<mml:mi>a</mml:mi>
<mml:mi>n</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>
</td>
<td valign="middle" align="center">
<inline-formula>
<mml:math display="inline" id="im7">
<mml:mrow>
<mml:mi>&#x3bc;</mml:mi>
<mml:mtext>g</mml:mtext>
<mml:mo stretchy="false">/</mml:mo>
<mml:msup>
<mml:mrow>
<mml:mtext>cm</mml:mtext>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:math>
</inline-formula>
</td>
<td valign="middle" align="center">1.2-1.8</td>
</tr>
<tr>
<td valign="middle" align="left">Brown pigments</td>
<td valign="middle" align="center">
<inline-formula>
<mml:math display="inline" id="im8">
<mml:mrow>
<mml:msub>
<mml:mi>C</mml:mi>
<mml:mrow>
<mml:mtext>bp&#xa0;</mml:mtext>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>
</td>
<td valign="middle" align="center">Unitless</td>
<td valign="middle" align="center">0.01-1.0</td>
</tr>
<tr>
<td valign="top" align="left">Dry matter content</td>
<td valign="middle" align="center">
<inline-formula>
<mml:math display="inline" id="im9">
<mml:mrow>
<mml:msub>
<mml:mi>C</mml:mi>
<mml:mi>m</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>
</td>
<td valign="middle" align="center">
<inline-formula>
<mml:math display="inline" id="im10">
<mml:mrow>
<mml:mtext>g</mml:mtext>
<mml:mo stretchy="false">/</mml:mo>
<mml:msup>
<mml:mrow>
<mml:mtext>cm</mml:mtext>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:math>
</inline-formula>
</td>
<td valign="middle" align="center">0.004-0.009</td>
</tr>
<tr>
<td valign="top" align="left">Leaf water depth</td>
<td valign="middle" align="center">
<inline-formula>
<mml:math display="inline" id="im11">
<mml:mrow>
<mml:msub>
<mml:mi>C</mml:mi>
<mml:mi>w</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>
</td>
<td valign="middle" align="center">cm</td>
<td valign="middle" align="center">0.01-0.05</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s2_4">
<label>2.4</label>
<title>Proposed PPADA-net</title>
<p>The simulated spectra are fed into a ResNet-based encoder network to extract high-level spectral features (<xref ref-type="fig" rid="f4">
<bold>Figure&#xa0;4</bold>
</xref>). The architecture processes spectral inputs through a hierarchical feature extraction pipeline. The hyperspectral reflectance data consisted of 2000 bands (400&#x2013;2400 nm at 1 nm resolution). To enable compatibility with ResNet, the 1D spectra were reshaped into 2D arrays of size 224&#xd7;224 and duplicated across three channels to form 224&#xd7;224&#xd7;3 tensors. This operation preserves spectral information and allows the use of 2D convolutional layers, which are effective in capturing local and hierarchical patterns. This transformation is purely structural and preserves the original spectral information. The network commences with a 7&#xd7;7 convolutional layer (64 filters, stride=2) followed by 3&#xd7;3 max-pooling, establishing preliminary spatial-spectral representations. Subsequent residual blocks employ bottleneck structures with cascaded 1&#xd7;1 and 3&#xd7;3 convolutions, progressively expanding channel dimensions from 64 to 1024 through four major stages. Each stage contains multiple identity-short cut blocks where 1&#xd7;1 convolutions perform channel dimension matching, while 3&#xd7;3 convolutions extract spatially invariant features. Notably, the architecture implements channel scaling factors of &#xd7;4 between stages (64&#x2192;256&#x2192;512&#x2192;1024), maintaining computational efficiency through bottleneck compression. The deep stack of 21 convolutional layers leverages residual connections to preserve gradient flow, with feature map spatial resolution systematically reduced through stride convolutions in transitional blocks. This design enables effective learning of multiscale spectral-spatial correlations while mitigating vanishing gradient issues inherent to deep networks. At this stage, a fully connected regression head is appended to the encoder output. The final encoder outputs high-dimensional latent representations suitable for downstream regression tasks through attached task-specific heads. 
We optimize the network by minimizing the mean squared error (MSE) loss function (<xref ref-type="bibr" rid="B21">LeCun et&#xa0;al., 2015</xref>) to predict LMA, EWT, and CHL values, with gradient backpropagation throughout the network to learn latent representations strongly correlated with plant traits from the large-scale simulated data. The MSE loss function is defined in <xref ref-type="disp-formula" rid="eq1">Equation 1</xref>.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>A schematic illustration of 2D ResNet-18 architectures used for plant trait prediction in this study. Each convolutional layer is followed by batch normalization and a ReLU.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g004.tif">
<alt-text content-type="machine-generated">Flowchart illustrating a neural network architecture. It begins with input spectra reshaped for convolution operations, followed by several layers of convolution and max pooling. An identity skip-connection is shown, involving batch normalization, ReLU, and convolutions, culminating in feature maps. The network progressively increases the depth and width of convolutions across stages.</alt-text>
</graphic>
</fig>
<disp-formula id="eq1">
<label>(1)</label>
<mml:math display="block" id="M1">
<mml:mrow>
<mml:mtable equalrows="true" equalcolumns="true">
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mi mathvariant="script">L</mml:mi>
<mml:mi>m</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mn>1</mml:mn>
<mml:mi>M</mml:mi>
</mml:mfrac>
<mml:mstyle displaystyle="true">
<mml:mrow>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>M</mml:mi>
</mml:msubsup>
</mml:mrow>
</mml:mstyle>
<mml:msup>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>,</mml:mo>
<mml:mi>m</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mrow>
<mml:mover>
<mml:mi>y</mml:mi>
<mml:mo>&#x2c6;</mml:mo>
</mml:mover>
</mml:mrow>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>,</mml:mo>
<mml:mi>m</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where m represents the trait index, and M represents the number of samples. The loss for combining all traits is defined in <xref ref-type="disp-formula" rid="eq2">Equation 2</xref>.</p>
<disp-formula id="eq2">
<label>(2)</label>
<mml:math display="block" id="M2">
<mml:mrow>
<mml:mtable equalrows="true" equalcolumns="true">
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mi mathvariant="script">L</mml:mi>
<mml:mrow>
<mml:mtext>downstream&#xa0;tasks&#xa0;</mml:mtext>
</mml:mrow>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mstyle displaystyle="true">
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>m</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>T</mml:mi>
</mml:msubsup>
<mml:mrow>
<mml:msub>
<mml:mi mathvariant="script">L</mml:mi>
<mml:mi>m</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mstyle>
</mml:mrow>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:math>
</disp-formula>
<p>Following pre-training, the second stage involves jointly feeding both the source domain (real-world dataset for training) and limited target domain (real-world dataset for prediction) into the pre-trained encoder. This strategy leverages the spectral feature representation capabilities acquired from large-scale simulated data while enabling fine-tuning and domain alignment through limited target domain samples, thereby enhancing prediction accuracy in real-world target environments. To simultaneously address plant trait prediction and domain adaptation, we introduce two parallel task heads at the output of the pre-trained encoder: (1) A Multilayer Perceptron (MLP) for regressing three plant traits (LMA, EWT, and CHL) as the downstream task, optimized using MSE loss. (2) A Hierarchical Cross-Domain Feature Alignment (HCDFA) head, this module takes input feature maps from both the source domain and target domain and processes them through a two-step mechanism. First, a Contrastive Attention Module calculates the cosine similarity between the input feature maps, emphasizing shared patterns while suppressing domain-specific noise. This process generates weighted feature maps for both domains. Subsequently, an adversarial contrastive loss ACL is applied to refine these weighted feature maps, increasing the similarity between the aligned feature representations of the source and target domains. These similarity weights are applied to the source domain feature map, dynamically adjusting its feature distribution to produce the aligned source domain feature map. Simultaneously, the target domain features undergo adaptive mapping to generate the aligned target domain feature map. During forward propagation, the HCDFA acts as an identity transform, while in backward propagation it inverts gradients from the DA loss, thereby encouraging the encoder to learn domain-invariant features through adversarial confusion. 
The composite objective function during this stage comprises two components is defined in <xref ref-type="disp-formula" rid="eq3">Equation 3</xref>.</p>
<disp-formula id="eq3">
<label>(3)</label>
<mml:math display="block" id="M3">
<mml:mrow>
<mml:mtable equalrows="true" equalcolumns="true">
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:mi>L</mml:mi>
<mml:mo>=</mml:mo>
<mml:msub>
<mml:mi>&#x3bb;</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:msub>
<mml:mi mathvariant="script">L</mml:mi>
<mml:mrow>
<mml:mi>M</mml:mi>
<mml:mi>S</mml:mi>
<mml:mi>E</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>&#x3bb;</mml:mi>
<mml:mn>2</mml:mn>
</mml:msub>
<mml:msub>
<mml:mi mathvariant="script">L</mml:mi>
<mml:mrow>
<mml:mi>D</mml:mi>
<mml:mi>A</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:math>
</disp-formula>
<p>
<inline-formula>
<mml:math display="inline" id="im12">
<mml:mrow>
<mml:msub>
<mml:mi mathvariant="script">L</mml:mi>
<mml:mrow>
<mml:mi>M</mml:mi>
<mml:mi>S</mml:mi>
<mml:mi>E</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> represents the MSE loss for plant trait regression, while <inline-formula>
<mml:math display="inline" id="im13">
<mml:mrow>
<mml:msub>
<mml:mi mathvariant="script">L</mml:mi>
<mml:mrow>
<mml:mi>D</mml:mi>
<mml:mi>A</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> denotes the domain adaptation loss. &#x3bb;<sub>1</sub> and &#x3bb;<sub>2</sub> are hyperparameters that control the weighting of the two loss terms, and their optimal balance can be determined through hyperparameter tuning on the validation set.</p>
</sec>
<sec id="s2_5">
<label>2.5</label>
<title>Training parameters</title>
<p>During network training, stochastic gradient descent is used to update the parameters of both the encoder and the two task heads in mini batches. The Few-shot learning strategy incorporates a small amount of labeled target-domain data into the network alongside source-domain data. A feature alignment mechanism mitigates domain shift, facilitating cross-domain knowledge transfer between datasets. Model training was conducted using a staged optimization strategy. In the first stage, pre-training was conducted using PROSPECT-D-generated spectral-trait pairs to initialize the ResNet-based encoder. The network, structured with a 7&#xd7;7 convolutional layer (64 filters, stride=2), 3&#xd7;3 max-pooling, and residual blocks scaling channels from 64 to 1024, was trained for 200 epochs with a batch size of 64. The Adam optimizer was used with an initial learning rate of 0.001 for rapid convergence, decaying by 1&#xd7;10<sup>&#x2212;5</sup> in later stages to prevent overfitting. The MSE loss function guided optimization, enabling the encoder to learn spectral features strongly correlated with CHL, EWT, and LMA, forming a robust foundation for subsequent domain adaptation. In the second stage of transfer learning, under the integration of source and target domain data, the number of epochs was adjusted to 100 with a batch size of 32, and the initial learning rate was updated to 1&#xd7;10<sup>&#x2212;4</sup> to facilitate more refined fine-tuning and domain adaptation.</p>
</sec>
<sec id="s2_6">
<label>2.6</label>
<title>Comparison of prediction models and verification strategies</title>
<p>Four regression methodologies were systematically compared for CHL, EWT, and LMA prediction: (1) conventional multivariate PLSR, (2) data-driven ResNet, along with two ablated variants, (3) physics-enhanced ResNet-PROSPECT and (4) domain-adaptive ResNet-GRL. These were rigorously benchmarked against our proposed PPADA-Net to evaluate the incremental benefits of integrated physical-adversarial learning.</p>
<p>As a conventional multivariate statistical approach, PLSR establishes linear relationships between hyperspectral reflectance (predictors) and plant traits (response variables) through latent variable decomposition. This model maps the relationship between spectral data and plant traits to a low-dimensional latent space, allowing PLSR to perform effective regression analysis while maintaining low computational cost, representing traditional chemometrics methodology in spectral analysis (<xref ref-type="bibr" rid="B33">Wold et&#xa0;al., 2001</xref>).</p>
<p>The standard ResNet18 architecture was adapted for spectral regression, employing residual blocks with 1D convolutions to capture hierarchical spectral features. Without any domain adaptation mechanisms, this deep learning baseline utilized raw spectral inputs (400&#x2013;2400 nm) to directly predict trait values through fully connected regression layers, demonstrating pure data-driven modeling capability (<xref ref-type="bibr" rid="B6">Chen et&#xa0;al., 2022</xref>).</p>
<p>ResNet-PROSPECT integrated physics-informed pretraining by initializing weights through simulated data generated from the PROSPECT-D radiative transfer model. The network first underwent 100-epoch pretraining on 10000 synthetic spectra-trait pairs covering the full parameter space, followed by fine-tuning on experimental datasets, testing the isolated effect of physical prior integration.</p>
<p>The ResNet-GRL architecture implements domain adversarial learning without physical constraints by appending a GRL between the feature extractor and the domain classifier. This dual-objective network simultaneously minimizes trait prediction error and maximizes domain confusion through adversarial training (&#x3bb; = 0.3), thereby evaluating the independent contribution of domain-invariant feature learning.</p>
</sec>
<sec id="s2_7">
<label>2.7</label>
<title>Performance evaluation</title>
<p>Three distinct validation paradigms were implemented to comprehensively assess model performance: (1) Aggregated Cross-Validation (<xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5a</bold>
</xref>): All samples from the five datasets (D1-D5) were pooled into a composite repository, followed by a stratified five-fold cross-validation scheme, where each fold maintained proportional representation of the original dataset distributions. In each iteration, 80% of the samples were allocated for training (with 15% reserved for internal validation to facilitate early stopping) and 20% for testing. This approach assessed general predictive accuracy under the assumption of homogeneous data distribution. (2) Cross-Dataset Validation (<xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5b</bold>
</xref>): To rigorously evaluate cross-domain transferability, leave-one-dataset-out experiments were conducted. In each trial, four datasets were used as the training set, while the remaining dataset was held out for independent testing. Spatial-spectral standardization was applied to each dataset using the respective training statistics to prevent information leakage. This protocol specifically quantified the model&#x2019;s generalization capacity across heterogeneous data acquisition conditions. (3) Training on D1&#x2013;D4 with Testing on D5 (<xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5c</bold>
</xref>): Datasets D1&#x2013;D4 were used as the training set, and the model&#x2019;s predictive performance was tested on the independently collected field dataset D5. This validation methodology is particularly valuable for assessing model robustness and practical applicability in real-world agricultural settings.</p>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Validation methodologies for plant trait prediction models. <bold>(a)</bold> Combined samples from five datasets (D1&#x2013;D5) were subjected to stratified five-fold cross-validation. <bold>(b)</bold> Each dataset was used as the validation set in turn, with the remaining datasets used for training, ensuring robustness across independent datasets. <bold>(c)</bold> D5 was used as the testing set, and the remaining four datasets (D1&#x2013;D4) were used as the training set.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g005.tif">
<alt-text content-type="machine-generated">Panel (a) shows a dataset merging process where five datasets combine, with 20% reserved for testing and the rest for training. Panel (b) illustrates a mapping between a source and target dataset, both consisting of five datasets. Panel (c) displays five datasets classified as training, while dataset five is the testing set.</alt-text>
</graphic>
</fig>
<p>In the model evaluation, to measure the variability of the dependent variable and prediction error, the normalized root mean square error (nRMSE) and the coefficient of determination (R<sup>2</sup>) are formulated in <xref ref-type="disp-formula" rid="eq4">Equations 4</xref>, <xref ref-type="disp-formula" rid="eq5">5</xref>.</p>
<disp-formula id="eq4">
<label>(4)</label>
<mml:math display="block" id="M4">
<mml:mrow>
<mml:msup>
<mml:mtext>R</mml:mtext>
<mml:mn>2</mml:mn>
</mml:msup>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
<mml:mo>&#x2212;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>n</mml:mi>
</mml:msubsup>
<mml:msup>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:msub>
<mml:mover accent="true">
<mml:mi>y</mml:mi>
<mml:mo>^</mml:mo>
</mml:mover>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
<mml:mrow>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>n</mml:mi>
</mml:msubsup>
<mml:msup>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:mover accent="true">
<mml:mi>y</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula id="eq5">
<label>(5)</label>
<mml:math display="block" id="M5">
<mml:mrow>
<mml:mtable equalrows="true" equalcolumns="true">
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:mtext>nRMSE&#xa0;</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mn>1</mml:mn>
<mml:mover accent="true">
<mml:mi>y</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
</mml:mfrac>
<mml:msqrt>
<mml:mrow>
<mml:mfrac>
<mml:mn>1</mml:mn>
<mml:mi>n</mml:mi>
</mml:mfrac>
<mml:mstyle displaystyle="true">
<mml:munderover>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>n</mml:mi>
</mml:munderover>
</mml:mstyle>
<mml:msup>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:msub>
<mml:mover accent="true">
<mml:mi>y</mml:mi>
<mml:mo>^</mml:mo>
</mml:mover>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:msqrt>
</mml:mrow>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where n is the number of samples; <inline-formula>
<mml:math display="inline" id="im14">
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> and <inline-formula>
<mml:math display="inline" id="im15">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>y</mml:mi>
<mml:mo>^</mml:mo>
</mml:mover>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> represent the measured and the predicted trait values of sample i, respectively. A higher value of R<sup>2</sup> and lower nRMSE values would indicate superior performance of the model.</p>
</sec>
</sec>
<sec id="s3" sec-type="results">
<label>3</label>
<title>Results</title>
<sec id="s3_1">
<label>3.1</label>
<title>Statistical analysis of data</title>
<p>Statistical analyses of spectral reflectance across the 400&#x2013;2400 nm range are illustrated in <xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6a</bold>
</xref>. Consistent patterns were observed across the five datasets, with all exhibiting identifiable spectral peaks and troughs. In the visible region (500&#x2013;700 nm), mean reflectance values remained low and relatively stable, reflecting strong pigment absorption. A sharp increase occurred in the near-infrared (NIR, 700&#x2013;1300 nm), consistent with the high reflectance associated with internal leaf structure. This was followed by a fluctuating decline in the short-wave infrared (SWIR, 1300&#x2013;2400 nm), where a pronounced dip was observed between 1900 and 2100 nm&#x2014;likely due to strong water absorption features. Inter-dataset differences were especially evident in specific spectral bands. For instance, the standard deviation of reflectance was notably higher in the visible spectrum, indicating greater variability among datasets in this region. Conversely, variability declined in the NIR and SWIR regions, particularly around 1000&#x2013;1300 nm and 1900&#x2013;2100 nm, as shown by the narrower spread in standard deviation and coefficient of variation (CV) curves.</p>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>
<bold>(a)</bold> Mean, standard deviation and coefficient of variation of spectral reflectance. <bold>(b)</bold> Pearson correlation coefficient between spectral reflectance and traits.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g006.tif">
<alt-text content-type="machine-generated">Graphs illustrating spectral analysis across different wavelengths:  (a) Displays mean reflectance, standard deviation, and coefficient of variation for datasets D1 to D5, ranging from 500 to 2500 nm.  (b) Shows correlation coefficients for CHL, EWT, and LMA against wavelengths from 500 to 2500 nm, with fluctuating patterns for each parameter.</alt-text>
</graphic>
</fig>
<p>
<xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6b</bold>
</xref> presents the Pearson correlation between spectral reflectance and plant traits. For CHL, the strongest correlations were found in the visible spectrum (400&#x2013;700 nm), aligning with known pigment absorption features. Correlation strength diminished in the NIR and SWIR regions, generally falling below 0.4 except in the 1900&#x2013;2400 nm sub-region. In contrast, EWT exhibited weak associations in the visible range, but showed moderate to strong correlations (r &gt; 0.5) throughout the 700&#x2013;1900 nm range, underscoring its sensitivity to water-related absorption features in those bands. For LMA, positive correlations peaked in the 700&#x2013;1400 nm range, suggesting a strong relationship between biomass-related traits and reflectance in this region. Trait&#x2013;spectrum relationships also varied across datasets. For example, Dataset D2 exhibited higher LMA correlations in the NIR, while Dataset D5 consistently showed weaker correlations for EWT, reflecting the combined effects of biological variability and environmental conditions. These results emphasize the importance of accounting for both spectral region characteristics and dataset heterogeneity in trait modeling.</p>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Comparison of models</title>
<sec id="s3_2_1">
<label>3.2.1</label>
<title>Visualization of learned representation</title>
<p>A t-SNE visualization of feature distributions from multiple datasets is presented in <xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7</bold>
</xref>, comparing features extracted by ResNet with those processed by PPADA-Net using adversarial domain adaptation. The left panel displays ResNet-derived features, where color-coded data points from different datasets form distinct, well-separated clusters with minimal overlap. This sharp separation arises from domain-specific biases in spectral reflectance patterns, leading to distributional mismatches that hinder cross-dataset prediction accuracy in transfer learning. In contrast, the right panel illustrates features processed by PPADA-Net, where adversarial learning induces two key transformations: (1) inter-dataset boundaries become less distinct, with most data points forming mixed-region neighborhoods, and (2) previously compact clusters disperse into overlapping distributions, particularly in high-dimensional manifolds associated with invariant spectral features. These structural adjustments indicate that PPADA-Net effectively mitigates domain shifts by learning transfer-invariant representations, enhancing knowledge transfer across heterogeneous datasets. This alignment mechanism underpins PPADA-Net&#x2019;s superior cross-domain generalization performance.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>t-SNE visualization of features extracted from different domain datasets.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g007.tif">
<alt-text content-type="machine-generated">Two t-SNE visualizations compare dataset domains before and after domain adaptation. The left plot shows ResNet features with clusters largely separated by color. The right plot displays PPADA-Net features where the clusters are more blended, indicating adaptation. Both plots include a legend with five dataset categories.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s3_2_2">
<label>3.2.2</label>
<title>Traits prediction performance</title>
<p>To evaluate the impact of synthetic data volume, the model was pretrained using varying amounts of PROSPECT-generated spectra (2k, 4k, 6k,&#x2026;, 20k). As shown in <xref ref-type="fig" rid="f8">
<bold>Figure&#xa0;8</bold>
</xref>, predictive accuracy increased with the size of the synthetic dataset. The improvement was particularly notable for CHL and EWT, indicating that PROSPECT provides informative priors related to canopy chlorophyll and water status. Although performance plateaued or showed minor fluctuations beyond 10,000 samples for certain traits, this data volume represents a practical trade-off between model accuracy and computational efficiency.</p>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>Model performance on three plant traits (CHL, EWT, and LMA) with varying numbers of PROSPECT-simulated spectra used during pretraining.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g008.tif">
<alt-text content-type="machine-generated">Two line graphs display the relationship between the number of simulated spectra and performance metrics for CHL, EWT, and LMA. The top graph shows R&#xb2; values increasing with simulated spectra, with LMA peaking highest. The bottom graph shows nRMSE values decreasing as simulated spectra increase, with LMA having the lowest values.</alt-text>
</graphic>
</fig>
<p>This study evaluates the performance of various spectral reflectance prediction models for three plant traits. As illustrated in <xref ref-type="fig" rid="f9">
<bold>Figure&#xa0;9a</bold>
</xref>, the models exhibit significant differences in R&#xb2; and nRMSE. Overall, the traditional machine learning model PLSR demonstrates relatively low accuracy, with average R&#xb2; values of 0.59, 0.63, and 0.72 for CHL, EWT, and LMA, respectively. In contrast, the deep learning based ResNet improves the R&#xb2; for CHL to 0.63 (an increase of 6.8%) via nonlinear feature extraction, although its EWT prediction slightly degrades (R&#xb2; = 0.61 versus 0.63), underscoring the limitations of purely data driven, end to end training for cross trait generalization. Notably, incorporating physical priors and domain adaptation strategies significantly enhances model performance. For example, ResNet-PROSPECT, pretrained on simulated data generated by the PROSPECT radiative transfer model, attains R&#xb2; values of 0.67, 0.74, and 0.80 for CHL, EWT, and LMA, respectively, which represents an average improvement of 8.3% over the basic ResNet and shows the most substantial error reduction in the EWT prediction (nRMSE = 0.09 versus 0.15). Furthermore, ResNet-GRL employs adversarial learning to align features between the source and target domains, yielding a marginal improvement in CHL prediction (R&#xb2; = 0.68 versus 0.67) but slightly inferior LMA performance compared to ResNet-PROSPECT (R&#xb2; = 0.78 versus 0.80), suggesting that pretraining with physically simulated data is more advantageous for certain traits, such as LMA. The dual strategy PPADA-Net, which integrates physical constraints and domain adaptation, achieves the best overall performance by increasing the R&#xb2; values for CHL, EWT, and LMA to 0.72, 0.77, and 0.86, respectively, which represents an average enhancement of 5.1% over single strategy models and demonstrates balanced improvements across different traits. 
From the perspective of individual traits, LMA exhibits the highest prediction accuracy (PPADA-Net R&#xb2; = 0.86), likely due to a more pronounced physical association between its spectral characteristics and dry matter content. The variation in prediction performance across traits aligns with spectral-trait correlations in Section 3.1. LMA exhibits the strongest correlations, especially in the NIR region (700&#x2013;1400 nm), with stable reflectance and strong biomass associations. EWT displays moderate to strong correlations over a slightly narrower range (700&#x2013;1900 nm). In contrast, CHL exhibits strong correlations only in the narrow visible range (400&#x2013;700 nm). These correlation patterns explain why LMA consistently achieves the highest prediction accuracy, followed by EWT, with CHL showing the lowest performance. This underscores the importance of intrinsic spectral sensitivity and the information content available for each trait in determining model accuracy.</p>
<fig id="f9" position="float">
<label>Figure&#xa0;9</label>
<caption>
<p>
<bold>(a)</bold> Accuracy evaluation of the PLSR, ResNet, ResNet-PROSPECT, ResNet-GRL and PPADA-Net traits prediction models. The error bar represents the standard deviation of the validation. <bold>(b)</bold> Horizontal bar comparison of model variants (e.g., CNN, CNN-PROSPECT, CNN-HCDFA, and Transformer) in terms of R&#xb2; and nRMSE. CHL, Chlorophyll Content; EWT, Equivalent Water Thickness; LMA, Leaf Mass per Area.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g009.tif">
<alt-text content-type="machine-generated">Bar charts and horizontal bar comparisons visualize model performance metrics across different methods. Part (a) shows bar charts for CHL, EWT, and LMA with R&#xb2; and nRMSE values for PLSR, Resnet, Resnet-PROSPECT, Resnet-GRL, and PPADA-Net. Part (b) uses horizontal bars to compare models like CNN, CNN-PROSPECT, CNN-HCDFA, and Transformers against metrics R&#xb2; and nRMSE for CHL, EWT, and LMA, with blue representing R&#xb2; and green for nRMSE.</alt-text>
</graphic>
</fig>
<p>The performance of each architecture under three settings&#x2014;Baseline, PROSPECT pretraining, and with the HCDFA module&#x2014;was analyzed in <xref ref-type="fig" rid="f9">
<bold>Figure&#xa0;9b</bold>
</xref>. Overall, ResNet achieved the best prediction accuracy across the three target traits, followed closely by Transformer, while CNN showed a notable performance drop. In ablation analysis, both PROSPECT pretraining and the HCDFA module led to performance improvements across all architectures. When both modules were combined, further accuracy gains were observed, suggesting that PROSPECT provides beneficial physical priors, and the domain alignment strategy of HCDFA effectively mitigates cross-ecosystem trait heterogeneity.</p>
</sec>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Transferability validation across different datasets</title>
<p>The performance of the PLSR model and the proposed PPADA-Net in predicting three plant traits was evaluated (<xref ref-type="fig" rid="f10">
<bold>Figure&#xa0;10</bold>
</xref>) across various training-testing dataset combinations (<xref ref-type="table" rid="T3"><bold>Tables 3</bold></xref>, <xref ref-type="table" rid="T4"><bold>4</bold></xref>). PPADA-Net consistently outperformed the PLSR model across most dataset combinations for all three traits. On average, PPADA-Net exhibited higher R&#xb2; values and lower nRMSE values than PLSR. For instance, in predicting CHL, PPADA-Net achieved a mean R&#xb2; of 0.53 and a mean nRMSE of 0.12 across all dataset combinations, whereas PLSR attained a mean R&#xb2; of 0.18 and a mean nRMSE of 0.25. Similarly, PPADA-Net achieved an average R&#xb2; of 0.72 and an nRMSE of 0.10 in predicting LMA, whereas PLSR attained an R&#xb2; of 0.46 and an nRMSE of 0.15. This indicates that PPADA-Net, which integrates simulated data pre-training with adversarial domain alignment, significantly enhances prediction accuracy and reliability.</p>
<fig id="f10" position="float">
<label>Figure&#xa0;10</label>
<caption>
<p>
<bold>(a, c)</bold> Cross validation of the PLSR and PPADA-Net traits prediction models (R<sup>2</sup> and nRMSE). D1&#x2013;D2 means that Dataset 1 is the training set and Dataset 2 is the test set. <bold>(b, d)</bold> The kernel density estimate (KDE) of the trait-based metric distributions (R<sup>2</sup> and nRMSE).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g010.tif">
<alt-text content-type="machine-generated">Four-panel image containing bar charts and density plots. Panel (a) displays R&#xb2; values for various models: CHL-PLSR, CHL-PPADA, EWT-PLSR, EWT-PPADA, LMA-PLSR, and LMA-PPADA with multiple datasets (D1-D2 to D5-D4). Panel (b) shows density distribution of R&#xb2; for these models. Panel (c) illustrates nRMSE values with the same models and datasets, and panel (d) presents density distribution of nRMSE. Each model and dataset is color-coded with a legend on the side.</alt-text>
</graphic>
</fig>
<table-wrap id="T3" position="float">
<label>Table&#xa0;3</label>
<caption>
<p>The performance results of plant traits prediction at five growth stages using the PLSR and PPADA models.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Group</th>
<th valign="middle" align="center">CHL-PLSR</th>
<th valign="middle" align="center">CHL-PPADA</th>
<th valign="middle" align="center">EWT-PLSR</th>
<th valign="middle" align="center">EWT-PPADA</th>
<th valign="middle" align="center">LMA-PLSR</th>
<th valign="middle" align="center">LMA-PPADA</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">D1-D2</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.53</td>
<td valign="middle" align="left">0.18</td>
<td valign="middle" align="left">0.74</td>
<td valign="middle" align="left">0.34</td>
<td valign="middle" align="left">0.68</td>
</tr>
<tr>
<td valign="middle" align="left">D1-D3</td>
<td valign="middle" align="left">0.05</td>
<td valign="middle" align="left">0.58</td>
<td valign="middle" align="left">0.27</td>
<td valign="middle" align="left">0.81</td>
<td valign="middle" align="left">0.29</td>
<td valign="middle" align="left">0.76</td>
</tr>
<tr>
<td valign="middle" align="left">D1-D4</td>
<td valign="middle" align="left">0.22</td>
<td valign="middle" align="left">0.59</td>
<td valign="middle" align="left">0.38</td>
<td valign="middle" align="left">0.77</td>
<td valign="middle" align="left">0.22</td>
<td valign="middle" align="left">0.62</td>
</tr>
<tr>
<td valign="middle" align="left">D1-D5</td>
<td valign="middle" align="left">0.21</td>
<td valign="middle" align="left">0.58</td>
<td valign="middle" align="left">0.3</td>
<td valign="middle" align="left">0.69</td>
<td valign="middle" align="left">0.23</td>
<td valign="middle" align="left">0.67</td>
</tr>
<tr>
<td valign="middle" align="left">D2-D1</td>
<td valign="middle" align="left">0.14</td>
<td valign="middle" align="left">0.47</td>
<td valign="middle" align="left">0.49</td>
<td valign="middle" align="left">0.77</td>
<td valign="middle" align="left">0.53</td>
<td valign="middle" align="left">0.75</td>
</tr>
<tr>
<td valign="middle" align="left">D2-D3</td>
<td valign="middle" align="left">0.02</td>
<td valign="middle" align="left">0.54</td>
<td valign="middle" align="left">0.56</td>
<td valign="middle" align="left">0.81</td>
<td valign="middle" align="left">0.42</td>
<td valign="middle" align="left">0.74</td>
</tr>
<tr>
<td valign="middle" align="left">D2-D4</td>
<td valign="middle" align="left">0.08</td>
<td valign="middle" align="left">0.6</td>
<td valign="middle" align="left">0.39</td>
<td valign="middle" align="left">0.78</td>
<td valign="middle" align="left">0.44</td>
<td valign="middle" align="left">0.78</td>
</tr>
<tr>
<td valign="middle" align="left">D2-D5</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.59</td>
<td valign="middle" align="left">0.35</td>
<td valign="middle" align="left">0.7</td>
<td valign="middle" align="left">0.41</td>
<td valign="middle" align="left">0.77</td>
</tr>
<tr>
<td valign="middle" align="left">D3-D1</td>
<td valign="middle" align="left">0.33</td>
<td valign="middle" align="left">0.34</td>
<td valign="middle" align="left">0.34</td>
<td valign="middle" align="left">0.66</td>
<td valign="middle" align="left">0.53</td>
<td valign="middle" align="left">0.67</td>
</tr>
<tr>
<td valign="middle" align="left">D3-D2</td>
<td valign="middle" align="left">0.41</td>
<td valign="middle" align="left">0.43</td>
<td valign="middle" align="left">0.42</td>
<td valign="middle" align="left">0.59</td>
<td valign="middle" align="left">0.63</td>
<td valign="middle" align="left">0.61</td>
</tr>
<tr>
<td valign="middle" align="left">D3-D4</td>
<td valign="middle" align="left">0.29</td>
<td valign="middle" align="left">0.51</td>
<td valign="middle" align="left">0.27</td>
<td valign="middle" align="left">0.62</td>
<td valign="middle" align="left">0.47</td>
<td valign="middle" align="left">0.39</td>
</tr>
<tr>
<td valign="middle" align="left">D3-D5</td>
<td valign="middle" align="left">0.28</td>
<td valign="middle" align="left">0.53</td>
<td valign="middle" align="left">0.36</td>
<td valign="middle" align="left">0.53</td>
<td valign="middle" align="left">0.46</td>
<td valign="middle" align="left">0.55</td>
</tr>
<tr>
<td valign="middle" align="left">D4-D1</td>
<td valign="middle" align="left">0.34</td>
<td valign="middle" align="left">0.35</td>
<td valign="middle" align="left">0.33</td>
<td valign="middle" align="left">0.68</td>
<td valign="middle" align="left">0.63</td>
<td valign="middle" align="left">0.77</td>
</tr>
<tr>
<td valign="middle" align="left">D4-D2</td>
<td valign="middle" align="left">0.5</td>
<td valign="middle" align="left">0.51</td>
<td valign="middle" align="left">0.46</td>
<td valign="middle" align="left">0.68</td>
<td valign="middle" align="left">0.65</td>
<td valign="middle" align="left">0.82</td>
</tr>
<tr>
<td valign="middle" align="left">D4-D3</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.58</td>
<td valign="middle" align="left">0.39</td>
<td valign="middle" align="left">0.76</td>
<td valign="middle" align="left">0.73</td>
<td valign="middle" align="left">0.78</td>
</tr>
<tr>
<td valign="middle" align="left">D4-D5</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.6</td>
<td valign="middle" align="left">0.33</td>
<td valign="middle" align="left">0.69</td>
<td valign="middle" align="left">0.58</td>
<td valign="middle" align="left">0.79</td>
</tr>
<tr>
<td valign="middle" align="left">D5-D1</td>
<td valign="middle" align="left">0.01</td>
<td valign="middle" align="left">0.43</td>
<td valign="middle" align="left">0.34</td>
<td valign="middle" align="left">0.73</td>
<td valign="middle" align="left">0.38</td>
<td valign="middle" align="left">0.72</td>
</tr>
<tr>
<td valign="middle" align="left">D5-D2</td>
<td valign="middle" align="left">0.04</td>
<td valign="middle" align="left">0.54</td>
<td valign="middle" align="left">0.37</td>
<td valign="middle" align="left">0.8</td>
<td valign="middle" align="left">0.51</td>
<td valign="middle" align="left">0.83</td>
</tr>
<tr>
<td valign="middle" align="left">D5-D3</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.6</td>
<td valign="middle" align="left">0.53</td>
<td valign="middle" align="left">0.79</td>
<td valign="middle" align="left">0.61</td>
<td valign="middle" align="left">0.81</td>
</tr>
<tr>
<td valign="middle" align="left">D5-D4</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.62</td>
<td valign="middle" align="left">0.58</td>
<td valign="middle" align="left">0.82</td>
<td valign="middle" align="left">0.22</td>
<td valign="middle" align="left">0.81</td>
</tr>
<tr>
<td valign="middle" align="left">AVG</td>
<td valign="middle" align="left">0.18</td>
<td valign="middle" align="left">0.53</td>
<td valign="middle" align="left">0.38</td>
<td valign="middle" align="left">0.72</td>
<td valign="middle" align="left">0.46</td>
<td valign="middle" align="left">0.72</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>The metrics reported are R&#xb2;.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<table-wrap id="T4" position="float">
<label>Table&#xa0;4</label>
<caption>
<p>The performance results of plant traits prediction at five growth stages using the PLSR and PPADA models.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Group</th>
<th valign="middle" align="center">CHL-PLSR</th>
<th valign="middle" align="center">CHL-PPADA</th>
<th valign="middle" align="center">EWT-PLSR</th>
<th valign="middle" align="center">EWT-PPADA</th>
<th valign="middle" align="center">LMA-PLSR</th>
<th valign="middle" align="center">LMA-PPADA</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">D1-D2</td>
<td valign="middle" align="left">0.21</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.16</td>
<td valign="middle" align="left">0.09</td>
<td valign="middle" align="left">0.15</td>
<td valign="middle" align="left">0.11</td>
</tr>
<tr>
<td valign="middle" align="left">D1-D3</td>
<td valign="middle" align="left">0.26</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.17</td>
<td valign="middle" align="left">0.08</td>
<td valign="middle" align="left">0.16</td>
<td valign="middle" align="left">0.09</td>
</tr>
<tr>
<td valign="middle" align="left">D1-D4</td>
<td valign="middle" align="left">0.23</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.14</td>
<td valign="middle" align="left">0.08</td>
<td valign="middle" align="left">0.17</td>
<td valign="middle" align="left">0.1</td>
</tr>
<tr>
<td valign="middle" align="left">D1-D5</td>
<td valign="middle" align="left">0.18</td>
<td valign="middle" align="left">0.1</td>
<td valign="middle" align="left">0.14</td>
<td valign="middle" align="left">0.09</td>
<td valign="middle" align="left">0.16</td>
<td valign="middle" align="left">0.11</td>
</tr>
<tr>
<td valign="middle" align="left">D2-D1</td>
<td valign="middle" align="left">0.24</td>
<td valign="middle" align="left">0.14</td>
<td valign="middle" align="left">0.14</td>
<td valign="middle" align="left">0.09</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.09</td>
</tr>
<tr>
<td valign="middle" align="left">D2-D3</td>
<td valign="middle" align="left">0.31</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.13</td>
<td valign="middle" align="left">0.08</td>
<td valign="middle" align="left">0.15</td>
<td valign="middle" align="left">0.1</td>
</tr>
<tr>
<td valign="middle" align="left">D2-D4</td>
<td valign="middle" align="left">0.26</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.13</td>
<td valign="middle" align="left">0.08</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.08</td>
</tr>
<tr>
<td valign="middle" align="left">D2-D5</td>
<td valign="middle" align="left">0.23</td>
<td valign="middle" align="left">0.1</td>
<td valign="middle" align="left">0.14</td>
<td valign="middle" align="left">0.1</td>
<td valign="middle" align="left">0.14</td>
<td valign="middle" align="left">0.09</td>
</tr>
<tr>
<td valign="middle" align="left">D3-D1</td>
<td valign="middle" align="left">0.21</td>
<td valign="middle" align="left">0.16</td>
<td valign="middle" align="left">0.16</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.13</td>
<td valign="middle" align="left">0.11</td>
</tr>
<tr>
<td valign="middle" align="left">D3-D2</td>
<td valign="middle" align="left">0.26</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.13</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.12</td>
</tr>
<tr>
<td valign="middle" align="left">D3-D4</td>
<td valign="middle" align="left">0.23</td>
<td valign="middle" align="left">0.13</td>
<td valign="middle" align="left">0.14</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.13</td>
</tr>
<tr>
<td valign="middle" align="left">D3-D5</td>
<td valign="middle" align="left">0.18</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.13</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.13</td>
<td valign="middle" align="left">0.12</td>
</tr>
<tr>
<td valign="middle" align="left">D4-D1</td>
<td valign="middle" align="left">0.21</td>
<td valign="middle" align="left">0.16</td>
<td valign="middle" align="left">0.21</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.09</td>
</tr>
<tr>
<td valign="middle" align="left">D4-D2</td>
<td valign="middle" align="left">0.16</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.16</td>
<td valign="middle" align="left">0.1</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.08</td>
</tr>
<tr>
<td valign="middle" align="left">D4-D3</td>
<td valign="middle" align="left">0.22</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.22</td>
<td valign="middle" align="left">0.09</td>
<td valign="middle" align="left">0.1</td>
<td valign="middle" align="left">0.09</td>
</tr>
<tr>
<td valign="middle" align="left">D4-D5</td>
<td valign="middle" align="left">0.18</td>
<td valign="middle" align="left">0.1</td>
<td valign="middle" align="left">0.18</td>
<td valign="middle" align="left">0.09</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.08</td>
</tr>
<tr>
<td valign="middle" align="left">D5-D1</td>
<td valign="middle" align="left">0.41</td>
<td valign="middle" align="left">0.15</td>
<td valign="middle" align="left">0.24</td>
<td valign="middle" align="left">0.1</td>
<td valign="middle" align="left">0.21</td>
<td valign="middle" align="left">0.1</td>
</tr>
<tr>
<td valign="middle" align="left">D5-D2</td>
<td valign="middle" align="left">0.32</td>
<td valign="middle" align="left">0.11</td>
<td valign="middle" align="left">0.21</td>
<td valign="middle" align="left">0.08</td>
<td valign="middle" align="left">0.2</td>
<td valign="middle" align="left">0.07</td>
</tr>
<tr>
<td valign="middle" align="left">D5-D3</td>
<td valign="middle" align="left">0.33</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.22</td>
<td valign="middle" align="left">0.09</td>
<td valign="middle" align="left">0.19</td>
<td valign="middle" align="left">0.09</td>
</tr>
<tr>
<td valign="middle" align="left">D5-D4</td>
<td valign="middle" align="left">0.34</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.19</td>
<td valign="middle" align="left">0.07</td>
<td valign="middle" align="left">0.22</td>
<td valign="middle" align="left">0.07</td>
</tr>
<tr>
<td valign="middle" align="left">AVG</td>
<td valign="middle" align="left">0.25</td>
<td valign="middle" align="left">0.12</td>
<td valign="middle" align="left">0.17</td>
<td valign="middle" align="left">0.09</td>
<td valign="middle" align="left">0.15</td>
<td valign="middle" align="left">0.10</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>The metrics reported are nRMSE.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<p>The model&#x2019;s performance varied considerably across different dataset combinations. For example, in predicting LMA, when using D1 as the training set and D2 as the testing set, PPADA-Net achieved an R&#xb2; of 0.68 and an nRMSE of 0.11, whereas PLSR recorded an R&#xb2; of 0.34 and an nRMSE of 0.15. Similarly, for the D5&#x2013;D4 combination, PPADA-Net attained an R&#xb2; of 0.81 and an nRMSE of 0.07, in contrast to PLSR&#x2019;s R&#xb2; of 0.22 and an nRMSE of 0.22. These examples further highlight that the prediction accuracy of data-driven models is significantly influenced by the training set. Under these conditions, PPADA-Net demonstrated greater stability and adaptability across various dataset combinations. For example, in predicting EWT across different dataset combinations, PLSR exhibited an R&#xb2; range of 0.18&#x2013;0.58 and an nRMSE range of 0.13&#x2013;0.24. In contrast, PPADA-Net displayed an R&#xb2; range of 0.53&#x2013;0.82 and an nRMSE range of 0.07&#x2013;0.12, indicating that the integration of simulated data pre-training and adversarial domain alignment enables the model to better handle domain shifts and maintain consistent performance across diverse data sources, which is particularly valuable in cases of high data heterogeneity. Overall, among the three traits, CHL is generally more challenging to predict accurately compared to EWT and LMA. Although PPADA-Net achieved relatively high R&#xb2; values for LMA in certain cases, some dataset combinations still yielded low R&#xb2; values, suggesting that achieving high-precision transfer for CHL prediction across different datasets remains challenging.</p>
</sec>
<sec id="s3_4">
<label>3.4</label>
<title>Performance evaluation in independently collected field dataset</title>
<p>When training the model on D1&#x2013;D4 and testing on the independently collected field D5 dataset, notable performance differences were observed across models. The PPADA model (<xref ref-type="fig" rid="f11">
<bold>Figure&#xa0;11a</bold>
</xref>) achieved high prediction accuracy for all three traits, with R&#xb2; values of 0.72 (CHL), 0.78 (EWT), and 0.87 (LMA), and corresponding nRMSE values of 0.08, 0.08, and 0.07. The scatter plots show strong linear agreement between predicted and observed values, especially for LMA, where most points closely align with the 1:1 line, indicating superior predictive performance. CHL and EWT predictions were more concentrated in the low-to-mid value ranges, with slightly reduced agreement at higher values. In contrast, the PLSR model (<xref ref-type="fig" rid="f11">
<bold>Figure&#xa0;11b</bold>
</xref>) produced lower overall accuracy, with R&#xb2; values of 0.53, 0.56, and 0.71 for CHL, EWT, and LMA, and nRMSE values of 0.10, 0.11, and 0.10, respectively. The scatter plots display greater dispersion, particularly for CHL and EWT, where deviations were more pronounced in the lower measurement ranges. Some improvement in agreement was observed at higher values. Among the three traits, LMA still yielded the best performance, but compared to PPADA, its R&#xb2; was 0.16 lower and nRMSE was 0.03 higher.</p>
<fig id="f11" position="float">
<label>Figure&#xa0;11</label>
<caption>
<p>Comparison of prediction performance between the PPADA model <bold>(a)</bold> and PLSR model <bold>(b)</bold> for CHL, EWT and LMA. Each subplot shows scatterplots of estimated versus measured values, with marginal density distributions. The dashed gray line represents the 1:1 line of perfect agreement, and the solid gray line indicates the linear regression fit. The coefficient of determination (R&#xb2;) and normalized root mean square error (nRMSE) are provided for each model.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g011.tif">
<alt-text content-type="machine-generated">Two rows of scatter plots with density plots comparing estimated and measured values. Top row (a) uses PPADA model for CHL, EWT, and LMA, showing high correlation with R-squared values of 0.72, 0.78, and 0.87. Bottom row (b) uses PLSR model for the same variables, with lower R-squared values of 0.53, 0.56, and 0.71. Each plot includes regression lines, confidence intervals, and nRMSE values. Density plots for each axis are shown on the margins.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s3_5">
<label>3.5</label>
<title>Spatial mapping of plant traits</title>
<p>Accurate mapping of plant functional traits serves as a pivotal tool for advancing crop breeding and precision agricultural management. The PPADA model was trained using datasets D1-D4 and subsequently applied to spatially visualize CHL, EWT, and LMA in 490 field samples from the independent test set D5, as shown in <xref ref-type="fig" rid="f11">
<bold>Figure&#xa0;11</bold>
</xref>. The results demonstrate a high degree of consistency between the spatial maps predicted by PPADA and the ground truth measurements, validating the robustness of the model in trait estimation. Among the three traits, LMA exhibited the highest spatial consistency, with prediction errors concentrated in the low-value range. For CHL and EWT, although most predictions closely matched the observed values, the model showed slight underestimation for samples with exceptionally high values. Overall, the PPADA model demonstrated its capability for high-throughput, spectral-based trait monitoring and field mapping applications.</p>
</sec>
</sec>
<sec id="s4" sec-type="discussion">
<label>4</label>
<title>Discussion</title>
<sec id="s4_1">
<label>4.1</label>
<title>Comparison and analysis of models for plant traits estimation</title>
<p>The use of spectral features, including reflectance data, vegetation indices, and spectral derivatives, for plant trait prediction has been widely applied in previous studies (<xref ref-type="bibr" rid="B22">Li et&#xa0;al., 2024</xref>). For example, <xref ref-type="bibr" rid="B1">Aguirre-Gutierrez et&#xa0;al. (2021)</xref> demonstrated that key plant functional traits can be accurately predicted across the tropics using the high spatial and spectral resolution of Sentinel-2 imagery in conjunction with climatic and soil information. While PLSR offers high interpretability and computational efficiency in plant traits estimation, it has several limitations. First, it does not support multi-task learning, requiring separate models for different traits, which results in fragmented workflows. Second, it struggles to capture complex spectral-trait relationships, especially in high-dimensional or nonlinear contexts, such as SWIR interactions with leaf dry matter. As shown in our results, PLSR exhibited the lowest performance (mean R&#xb2; = 0.59&#x2013;0.72) and showed a significant decline in accuracy in cross-dataset scenarios (e.g., CHL R&#xb2; = 0.18), highlighting its vulnerability to data heterogeneity.</p>
<p>Deep neural networks, such as CNN, have improved traits prediction by enabling end-to-end multi-task learning and hierarchical feature extraction. <xref ref-type="bibr" rid="B8">Cherif et&#xa0;al. (2023)</xref> demonstrated that CNN-based architectures could simultaneously predict multiple traits with enhanced nonlinear modeling. While these models outperform traditional methods in single-domain settings, their heavy reliance on large, labeled datasets poses a challenge (<xref ref-type="bibr" rid="B7">Chen et&#xa0;al., 2023</xref>), as training robust models requires extensive field measurements, which are costly and scarce for certain traits. Furthermore, these models often exhibit poor cross-dataset generalizability due to domain shifts in spectral patterns (<xref ref-type="bibr" rid="B37">Zhang and Bao, 2022</xref>). Our results support these findings: ResNet showed inconsistent performance across traits (e.g., EWT R&#xb2; = 0.61 vs. PLSR&#x2019;s 0.63) and experienced significant accuracy declines in transfer learning scenarios (e.g., LMA R&#xb2; = 0.68 for D1-D2), highlighting its sensitivity to domain-specific biases. To overcome these limitations, PPADA-Net integrates physical priors from radiative transfer models (e.g., PROSPECT-simulated spectra) with adversarial domain adaptation, achieving dual benefits: physics-informed feature learning and robust cross-domain generalization. It outperformed all baseline models across traits (CHL: R&#xb2; = 0.72; EWT: R&#xb2; = 0.77; LMA: R&#xb2; = 0.86), with accuracy gains of 5.1% to 22.4% over PLSR and ResNet. These results have practical implications for remote sensing-based trait monitoring in real-world settings. PPADA-Net&#x2019;s strong performance in cross-dataset validation (e.g., LMA R&#xb2; = 0.72 vs. PLSR&#x2019;s 0.46) demonstrates its ability to mitigate domain shifts&#x2014;a key challenge when applying models across regions, time periods, or sensors. 
This is particularly valuable for large-scale agricultural monitoring and ecosystem management, where collecting labeled data in every new condition is impractical. Furthermore, the integration of physical priors reduces reliance on field data, potentially lowering the cost and labor of trait estimation.</p>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>Model performance with PROSPECT-D simulation data</title>
<p>Physical models, such as the PROSPECT-D radiative transfer model, simulate spectral reflectance based on the biochemical and structural properties of plant leaves, providing mechanistic insights into light-matter interactions (<xref ref-type="bibr" rid="B26">Peters and Noble, 2020</xref>). For example, <xref ref-type="bibr" rid="B4">Bhadra et&#xa0;al. (2024)</xref> used PROSPECT to simulate hyperspectral responses under varying parameters, demonstrating its effectiveness in controlled experimental settings. While these models offer a rigorous framework for understanding spectral-trait relationships, their standalone application is constrained by computational complexity, sensitivity to input parameter accuracy, and limited adaptability to real-world environmental variability. In contrast, purely data-driven models excel at capturing complex patterns from large datasets but often lack interpretability and struggle with generalization when data is scarce or subject to domain shifts (<xref ref-type="bibr" rid="B17">Hou et&#xa0;al., 2024</xref>). Integrating physical models with data-driven approaches has emerged as a promising strategy for improving plant trait estimation.</p>
<p>Integrating PROSPECT-D-generated simulations with empirical datasets mitigates these limitations by leveraging the complementary strengths of both approaches. Physical models enhance training data diversity by synthesizing spectra across a broad range of trait values and environmental conditions, reducing reliance on costly field measurements. Meanwhile, data-driven methods refine feature representations and optimize nonlinear mappings, compensating for the simplifications inherent in physical models. In this study, pretraining ResNet on PROSPECT-D-simulated spectra (ResNet-PROSPECT) significantly improved prediction accuracy compared to the baseline ResNet. For instance, LMA prediction R&#xb2; increased from 0.72 (ResNet) to 0.80 (ResNet-PROSPECT), while EWT accuracy improved by 17.7% (R&#xb2;=0.74 vs. 0.63), highlighting the benefits of physics-informed initialization. These improvements result from two key mechanisms: (1) simulated data introduced reflectance variations under extreme or rare trait values, enhancing model robustness; (2) PROSPECT-D&#x2019;s parameterization guided the network to focus on wavelengths critical for specific traits. The success of PROSPECT-D-enhanced models suggests a scalable approach for trait estimation in heterogeneous environments. By generating synthetic spectra for underrepresented conditions, physical models can pre-train networks to generalize beyond empirical dataset limitations. Future work could explore dynamically integrating physical simulations with domain adaptation, such as iteratively updating PROSPECT-D parameters based on field observations to refine synthetic data quality. Such advancements would further bridge the gap between theoretical modeling and empirical applications, fostering robust solutions for ecosystem monitoring under climate change.</p>
</sec>
<sec id="s4_3">
<label>4.3</label>
<title>Transferability analysis</title>
<p>The transferability of spectral-trait prediction models is crucial for real-world applications, as ecosystems exhibit substantial variability in spectral signatures and plant trait distributions due to differences in species composition, environmental conditions, and measurement protocols (<xref ref-type="bibr" rid="B38">Zhang et&#xa0;al., 2025a</xref>). Traditional machine learning and deep learning models often struggle to generalize across such heterogeneous datasets, limiting their effectiveness in large-scale monitoring. For instance, PLSR, while computationally efficient, demonstrates poor adaptability to spectral heterogeneity. In cross-dataset validation, PLSR achieved a mean R&#xb2; of only 0.18 for CHL and 0.46 for LMA, with nRMSE values reaching 0.25. Similarly, deep learning models such as ResNet, despite their capacity for multi-task learning, remain susceptible to domain-specific biases.</p>
<p>Adversarial domain adaptation has emerged as a powerful strategy for aligning feature distributions across domains. For example, <xref ref-type="bibr" rid="B23">Ma et&#xa0;al. (2022)</xref> proposed a domain adaptation scheme named adversarial entropy optimization to learn domain-invariant features, achieving state-of-the-art performance across diverse domain adaptation tasks. In this study, PPADA-Net integrates this technique with physical priors to address spectral heterogeneity. The model achieved a mean R&#xb2; of 0.53 for CHL, 0.72 for LMA, and 0.65 for EWT across all dataset combinations, outperforming PLSR by 35&#x2013;52% in R&#xb2;. Notably, PPADA-Net maintained stable performance even in challenging scenarios, such as the D5&#x2192;D4 transfer for LMA (R&#xb2;=0.81 vs. PLSR&#x2019;s 0.22), demonstrating its robustness against domain-specific noise. The enhanced transferability arises from two synergistic mechanisms. First, PROSPECT-D-simulated spectra provide trait-specific spectral patterns, enabling the model to prioritize domain-invariant biochemical signals over dataset-specific artifacts. Second, by minimizing discrepancies between source and target domains in high-dimensional feature space, PPADA-Net mitigates overfitting to local spectral variations. Future research could extend this framework to dynamically adapt to emerging ecosystems or sensor types, further bridging the gap between controlled simulations and field applications.</p>
</sec>
<sec id="s4_4">
<label>4.4</label>
<title>Performance on independently collected field datasets</title>
<p>PPADA-Net demonstrated exceptional performance on the independently collected field dataset (D5), comprising potato, soybean, and maize crops under diverse cultivars and growth stages. The model achieved R&#xb2; values of 0.72 (CHL), 0.78 (EWT), and 0.87 (LMA) with nRMSE reductions of 20%&#x2013;30% compared to PLSR (<xref ref-type="fig" rid="f10">
<bold>Figure&#xa0;10</bold>
</xref>). Notably, LMA prediction exhibited the highest accuracy (R&#xb2; = 0.87), likely due to its strong spectral-physical linkage with dry matter content, as captured by PROSPECT-D simulations. These results underscore the framework&#x2019;s ability to generalize beyond controlled experimental conditions, addressing the critical challenge of domain shifts caused by cultivar diversity, growth stage variability, and field-specific environmental factors. From an agronomic perspective, the high accuracy of PPADA-Net in mapping LMA and EWT (<xref ref-type="fig" rid="f12">
<bold>Figure&#xa0;12</bold>
</xref>) holds significant promise for optimizing irrigation scheduling and nutrient management. For example, spatially resolved LMA estimates could guide breeders in selecting drought-tolerant cultivars, while EWT monitoring may improve water-use efficiency in water-scarce regions. However, slight underestimation of CHL in high-value ranges (<xref ref-type="fig" rid="f10">
<bold>Figure&#xa0;10a</bold>
</xref>) suggests that chlorophyll&#x2019;s nonlinear spectral interactions under saturating conditions require further refinement. Future work should also validate the framework across broader agro-climatic zones and crop phenological stages to ensure scalability.</p>
<fig id="f12" position="float">
<label>Figure&#xa0;12</label>
<caption>
<p>Spatial mapping comparison of plant traits between PPADA predictions and ground measurements. The three above are the measured CHL, EWT, and LMA, while the three below are the predicted values.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1612430-g012.tif">
<alt-text content-type="machine-generated">Heatmaps showing data for CHL, EWT, and LMA, each with two panels labeled &#x201c;Measured&#x201d; and &#x201c;Estimated&#x201d;. Color gradients from green to red indicate varying data values across the panels.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s4_5">
<label>4.5</label>
<title>Limitations</title>
<p>While PPADA-Net demonstrates significant advancements in plant trait estimation, its performance, like other machine learning methods, remains heavily dependent on the quality and quantity of training data. The framework&#x2019;s effectiveness diminishes when training data are scarce or unrepresentative of target environments. For instance, PPADA-Net&#x2019;s reliance on PROSPECT-D-generated simulations introduces biases if the radiative transfer model fails to capture extreme environmental conditions. In such cases, synthetic data may inadequately represent real-world spectral-trait relationships, limiting generalization. To mitigate this, future work should incorporate anthropogenic constraints into simulations, such as expanding parameter ranges for leaf structure and biochemical composition, to enhance the diversity and representativeness of synthetic datasets. This would ensure simulated spectra encompass greater information entropy, better aligning with the variability observed in field data. On the other hand, although adversarial learning in PPADA-Net effectively aligns feature distributions across datasets, its performance may degrade when domain discrepancies are extreme. While our results demonstrated robustness in most cross-dataset validations, scenarios involving stark domain shifts require careful hyperparameter tuning to stabilize adversarial training. In summary, by refining the constructive interaction between physically based simulation and adaptive learning, PPADA-Net can evolve into a reliable tool for precision agriculture.</p>
</sec>
</sec>
<sec id="s5" sec-type="conclusion">
<label>5</label>
<title>Conclusion</title>
<p>This study presents the PROSPECT Pre-trained Adversarial Domain Adaptation Network (PPADA-Net), a novel framework that synergizes physical radiative transfer modeling with adversarial domain adaptation to address cross-ecosystem plant trait estimation. Rigorously validated across five datasets&#x2014;including field experiments conducted at an agricultural research station in Xinxiang, China (D5)&#x2014;the framework demonstrates robust performance in both controlled and real-world agricultural environments. PPADA-Net achieves state-of-the-art accuracy in predicting chlorophyll content (CHL: R&#xb2; = 0.72), equivalent water thickness (EWT: R&#xb2; = 0.78), and leaf mass per area (LMA: R&#xb2; = 0.87) on the independently measured crop dataset (D5), significantly outperforming conventional PLSR with nRMSE reductions of 20%&#x2013;30%. Notably, LMA prediction exhibited the highest robustness (nRMSE = 0.07), highlighting the model&#x2019;s ability to generalize across heterogeneous field conditions, such as cultivar diversity, growth stage variability, and sensor-specific spectral biases. The integration of field-collected hyperspectral data with adversarial domain adaptation proved critical for bridging synthetic simulations and practical applications. For instance, the hierarchical cross-domain alignment module effectively mitigated domain shifts between laboratory datasets (D1&#x2013;D4) and real-world agricultural data (D5), as evidenced by t-SNE visualizations of domain-invariant features. In summary, PPADA-Net harmonizes physics-driven priors with data-driven adaptability, offering a transformative solution for plant trait estimation in heterogeneous environments.</p>
</sec>
</body>
<back>
<sec id="s6" sec-type="data-availability">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>HZ: Funding acquisition, Writing &#x2013; review &amp; editing, Investigation, Writing &#x2013; original draft, Formal analysis. HS: Writing &#x2013; original draft, Conceptualization, Visualization, Data curation. TS: Writing &#x2013; review &amp; editing, Formal analysis, Methodology. GS: Writing &#x2013; original draft, Data curation, Conceptualization. QW: Writing &#x2013; review &amp; editing, Funding acquisition, Validation, Supervision.</p>
</sec>
<sec id="s8" sec-type="funding-information">
<title>Funding</title>
<p>The author(s) declare that financial support was received for the research and/or publication of this article. The work is supported by the Innovation Exploration and Academic Talent Project of GUFE (Grant No. 2022XSXMB11), the Natural Science Research Project of Guizhou Provincial Department of Education (Grant No. QJJ(2023)063).</p>
</sec>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s10" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declare that no Generative AI was used in the creation of this manuscript.</p>
</sec>
<sec id="s11" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec id="s12" sec-type="supplementary-material">
<title>Supplementary material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fpls.2025.1612430/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fpls.2025.1612430/full#supplementary-material</ext-link>
</p>
<supplementary-material xlink:href="Table1.docx" id="SM1" mimetype="application/vnd.openxmlformats-officedocument.wordprocessingml.document"/>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Aguirre-Gutierrez</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Rifai</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Shenkin</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Oliveras</surname> <given-names>I.</given-names>
</name>
<name>
<surname>Bentley</surname> <given-names>L. P.</given-names>
</name>
<name>
<surname>Svatek</surname> <given-names>M.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Pantropical modelling of canopy functional traits using Sentinel-2 remote sensing data</article-title>. <source>Remote Sens. Environ.</source> <volume>252</volume>, <fpage>112122</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2020.112122</pub-id>
</citation></ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Amirkolaee</surname> <given-names>H. A.</given-names>
</name>
<name>
<surname>Shi</surname> <given-names>M.</given-names>
</name>
<name>
<surname>He</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Mulligan</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>AdaTreeFormer: Few shot domain adaptation for tree counting from a single high-resolution image</article-title>. <source>Isprs J. Photogramm. Remote Sens.</source> <volume>214</volume>, <fpage>193</fpage>&#x2013;<lpage>208</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2024.06.015</pub-id>
</citation></ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Angel</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Shiklomanov</surname> <given-names>A. N.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Remote detection and monitoring of plant traits: theory and practice</article-title>. <source>Annu. Plant Rev. Online</source> <volume>5</volume>.</citation></ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bhadra</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Sagan</surname> <given-names>V.</given-names>
</name>
<name>
<surname>Sarkar</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Braud</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Mockler</surname> <given-names>T. C.</given-names>
</name>
<name>
<surname>Eveland</surname> <given-names>A. L.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>PROSAIL-Net: A transfer learning-based dual stream neural network to estimate leaf chlorophyll and leaf angle of crops from UAV hyperspectral images</article-title>. <source>Isprs J. Photogramm. Remote Sens.</source> <volume>210</volume>, <fpage>1</fpage>&#x2013;<lpage>24</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2024.02.020</pub-id>
</citation></ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Broge</surname> <given-names>N. H.</given-names>
</name>
<name>
<surname>Leblanc</surname> <given-names>E.</given-names>
</name>
</person-group> (<year>2001</year>). <article-title>Comparing prediction power and stability of broadband and hyperspectral vegetation indices for estimation of green leaf area index and canopy chlorophyll density</article-title>. <source>Remote Sens. Environ.</source> <volume>76</volume>, <fpage>156</fpage>&#x2013;<lpage>172</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/S0034-4257(00)00197-8</pub-id>
</citation></ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chen</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Qi</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Shi</surname> <given-names>Z.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Remote sensing image change detection with transformers</article-title>. <source>IEEE Trans. Geosci. Remote Sens.</source> <volume>60</volume>, <page-range>1&#x2013;14</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TGRS.2021.3095166</pub-id>
</citation></ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chen</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Shen</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>A gridless DOA estimation algorithm based on unsupervised deep learning</article-title>. <source>Digital Signal Process.</source> <volume>133</volume>, <fpage>103823</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.dsp.2022.103823</pub-id>
</citation></ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cherif</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Feilhauer</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Berger</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Dao</surname> <given-names>P. D.</given-names>
</name>
<name>
<surname>Ewald</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Hank</surname> <given-names>T. B.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>From spectra to plant functional traits: Transferable multi-trait models from heterogeneous and sparse data</article-title>. <source>Remote Sens. Environ.</source> <volume>292</volume>, <fpage>113580</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2023.113580</pub-id>
</citation></ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Drenovsky</surname> <given-names>R. E.</given-names>
</name>
<name>
<surname>Grewell</surname> <given-names>B. J.</given-names>
</name>
<name>
<surname>D&#x2019;Antonio</surname> <given-names>C. M.</given-names>
</name>
<name>
<surname>Funk</surname> <given-names>J. L.</given-names>
</name>
<name>
<surname>James</surname> <given-names>J. J.</given-names>
</name>
<name>
<surname>Molinari</surname> <given-names>N.</given-names>
</name>
<etal/>
</person-group>. (<year>2012</year>). <article-title>A functional trait perspective on plant invasion</article-title>. <source>Ann. Bot.</source> <volume>110</volume>, <fpage>141</fpage>&#x2013;<lpage>153</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/aob/mcs100</pub-id>, PMID: <pub-id pub-id-type="pmid">22589328</pub-id></citation></ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fatichi</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Pappas</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Zscheischler</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Leuzinger</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Modelling carbon sources and sinks in terrestrial vegetation</article-title>. <source>New Phytol.</source> <volume>221</volume>, <fpage>652</fpage>&#x2013;<lpage>668</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/nph.15451</pub-id>, PMID: <pub-id pub-id-type="pmid">30339280</pub-id></citation></ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Feret</surname> <given-names>J. B.</given-names>
</name>
<name>
<surname>Gitelson</surname> <given-names>A. A.</given-names>
</name>
<name>
<surname>Noble</surname> <given-names>S. D.</given-names>
</name>
<name>
<surname>Jacquemoud</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>PROSPECT-D: Towards modeling leaf optical properties through a complete lifecycle</article-title>. <source>Remote Sens. Environ.</source> <volume>193</volume>, <fpage>204</fpage>&#x2013;<lpage>215</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2017.03.004</pub-id>
</citation></ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ganin</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Ustinova</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Ajakan</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Germain</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Larochelle</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Laviolette</surname> <given-names>F.</given-names>
</name>
<etal/>
</person-group>. (<year>2016</year>). <article-title>Domain-adversarial training of neural networks</article-title>. <source>J. Mach. Learn. Res.</source> <volume>17</volume>, <page-range>1&#x2013;35</page-range>.</citation></ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Haboudane</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Miller</surname> <given-names>J. R.</given-names>
</name>
<name>
<surname>Pattey</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Zarco-Tejada</surname> <given-names>P. J.</given-names>
</name>
<name>
<surname>Strachan</surname> <given-names>I. B.</given-names>
</name>
</person-group> (<year>2004</year>). <article-title>Hyperspectral vegetation indices and novel algorithms for predicting green LAI of crop canopies: Modeling and validation in the context of precision agriculture</article-title>. <source>Remote Sens. Environ.</source> <volume>90</volume>, <fpage>337</fpage>&#x2013;<lpage>352</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2003.12.013</pub-id>
</citation></ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Heidenreich</surname> <given-names>K. M.</given-names>
</name>
<name>
<surname>Richardson</surname> <given-names>T. L.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Photopigment, absorption, and growth responses of marine cryptophytes to varying spectral irradiance</article-title>. <source>J. Phycol.</source> <volume>56</volume>, <fpage>507</fpage>&#x2013;<lpage>520</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/jpy.12962</pub-id>, PMID: <pub-id pub-id-type="pmid">31876286</pub-id></citation></ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Helsen</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Bassi</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Feilhauer</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Kattenborn</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Matsushima</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Van Cleemput</surname> <given-names>E.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Evaluating different methods for retrieving intraspecific leaf trait variation from hyperspectral leaf reflectance</article-title>. <source>Ecol. Indic.</source> <volume>130</volume>, <fpage>108111</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ecolind.2021.108111</pub-id>
</citation></ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hoeppner</surname> <given-names>J. M.</given-names>
</name>
<name>
<surname>Skidmore</surname> <given-names>A. K.</given-names>
</name>
<name>
<surname>Darvishzadeh</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Heurich</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Chang</surname> <given-names>H.-C.</given-names>
</name>
<name>
<surname>Gara</surname> <given-names>T. W.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Mapping canopy chlorophyll content in a temperate forest using airborne hyperspectral data</article-title>. <source>Remote Sens.</source> <volume>12</volume>, <fpage>3573</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs12213573</pub-id>
</citation></ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hou</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Lin</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Mei</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>State of charge estimation for lithium-ion batteries based on battery model and data-driven fusion method</article-title>. <source>Energy</source> <volume>290</volume>, <fpage>130056</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.energy.2023.130056</pub-id>
</citation></ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hunt</surname> <given-names>E. R.</given-names>
</name>
<name>
<surname>Rock</surname> <given-names>B. N.</given-names>
</name>
</person-group> (<year>1989</year>). <article-title>Detection of changes in leaf water-content using near-infrared and middle-infrared reflectances</article-title>. <source>Remote Sens. Environ.</source> <volume>30</volume>, <fpage>43</fpage>&#x2013;<lpage>54</lpage>.</citation></ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jacquemoud</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Baret</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>1990</year>). <article-title>Prospect - A model of leaf optical-properties spectra</article-title>. <source>Remote Sens. Environ.</source> <volume>34</volume>, <fpage>75</fpage>&#x2013;<lpage>91</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/0034-4257(90)90100-Z</pub-id>
</citation></ref>
<ref id="B20">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Jing</surname> <given-names>J. J.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>L. Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>J. H.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>J. D.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>C. J.</given-names>
</name>
<collab>IEEE</collab>
</person-group> (<year>2004</year>). &#x201c;<article-title>Uncertainty analysis for NDVI using the physical models</article-title>,&#x201d; in <source>IEEE International Geoscience and Remote Sensing Symposium</source>(<publisher-loc>Anchorage, AK</publisher-loc>) <volume>6</volume>, <fpage>4321</fpage>&#x2013;<lpage>4324</lpage>.</citation></ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>LeCun</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Bengio</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Hinton</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Deep learning</article-title>. <source>Nature</source> <volume>521</volume>, <fpage>436</fpage>&#x2013;<lpage>444</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/nature14539</pub-id>, PMID: <pub-id pub-id-type="pmid">26017442</pub-id></citation></ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Zuo</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Nie</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Predictions of <italic>Spartina alterniflora</italic> leaf functional traits based on hyperspectral data and machine learning models</article-title>. <source>Eur. J. Remote Sens.</source> <volume>57</volume>, <fpage>2294951</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/22797254.2023.2294951</pub-id>
</citation></ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ma</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Lu</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Shen</surname> <given-names>H. T.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Adversarial entropy optimization for unsupervised domain adaptation</article-title>. <source>IEEE Trans. Neural Networks Learn. Syst.</source> <volume>33</volume>, <fpage>6263</fpage>&#x2013;<lpage>6274</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TNNLS.2021.3073119</pub-id>, PMID: <pub-id pub-id-type="pmid">33939616</pub-id></citation></ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Meloni</surname> <given-names>D. A.</given-names>
</name>
<name>
<surname>Oliva</surname> <given-names>M. A.</given-names>
</name>
<name>
<surname>Martinez</surname> <given-names>C. A.</given-names>
</name>
<name>
<surname>Cambraia</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2003</year>). <article-title>Photosynthesis and activity of superoxide dismutase, peroxidase and glutathione reductase in cotton under salt stress</article-title>. <source>Environ. Exp. Bot.</source> <volume>49</volume>, <fpage>69</fpage>&#x2013;<lpage>76</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/S0098-8472(02)00058-8</pub-id>
</citation></ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pan</surname> <given-names>S. J.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Q.</given-names>
</name>
</person-group> (<year>2010</year>). <article-title>A survey on transfer learning</article-title>. <source>IEEE Trans. Knowledge Data Eng.</source> <volume>22</volume>, <fpage>1345</fpage>&#x2013;<lpage>1359</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TKDE.2009.191</pub-id>
</citation></ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Peters</surname> <given-names>R. D.</given-names>
</name>
<name>
<surname>Noble</surname> <given-names>S. D.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Sensitivity and correlation analysis of PROSPECT-D and ABM-B leaf models</article-title>. <source>IEEE Trans. Geosci. Remote Sens.</source> <volume>58</volume>, <fpage>8258</fpage>&#x2013;<lpage>8267</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TGRS.36</pub-id><!-- NOTE(review): DOI "10.1109/TGRS.36" is a truncated journal-level stub, not an article DOI; look up the full DOI for this paper via Crossref and replace -->
</citation></ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Poorter</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Niinemets</surname> <given-names>&#xdc;.</given-names>
</name>
<name>
<surname>Poorter</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wright</surname> <given-names>I. J.</given-names>
</name>
<name>
<surname>Villar</surname> <given-names>R.</given-names>
</name>
</person-group> (<year>2009</year>). <article-title>Causes and consequences of variation in leaf mass per area (LMA): a meta-analysis</article-title>. <source>New Phytol.</source> <volume>182</volume>, <fpage>565</fpage>&#x2013;<lpage>588</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/j.1469-8137.2009.02830.x</pub-id>, PMID: <pub-id pub-id-type="pmid">19434804</pub-id></citation></ref>
<ref id="B28">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Radford</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Kim</surname> <given-names>J. W.</given-names>
</name>
<name>
<surname>Hallacy</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Ramesh</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Goh</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Agarwal</surname> <given-names>S.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). &#x201c;<article-title>Learning transferable visual models from natural language supervision</article-title>,&#x201d; in <conf-name>International Conference on Machine Learning (ICML)</conf-name> (<publisher-name>Electr Network</publisher-name>). <page-range>8748&#x2013;8763</page-range>.</citation></ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shu</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Dong</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Fei</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Meng</surname> <given-names>L.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Improved estimation of canopy water status in maize using UAV-based digital and hyperspectral images</article-title>. <source>Comput. Electron. Agric.</source> <volume>197</volume>, <fpage>106982</fpage>.</citation></ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>X.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Improving soybean yield prediction by integrating UAV nadir and cross-circling oblique imaging</article-title>. <source>Eur. J. Agron.</source> <volume>155</volume>, <fpage>127134</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.eja.2024.127134</pub-id>
</citation></ref>
<ref id="B31">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Fei</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Han</surname> <given-names>S.</given-names>
</name>
<etal/>
</person-group>. (<year>2025</year>). <article-title>Bridging the gap between hyperspectral imaging and crop breeding: soybean yield prediction and lodging classification with prototype contrastive learning</article-title>. <source>Comput. Electron. Agric.</source> <volume>230</volume>, <fpage>109859</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.109859</pub-id>
</citation></ref>
<ref id="B32">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Skidmore</surname> <given-names>A. K.</given-names>
</name>
<name>
<surname>Darvishzadeh</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Heiden</surname> <given-names>U.</given-names>
</name>
<name>
<surname>Heurich</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Leaf nitrogen content indirectly estimated by leaf traits derived from the PROSPECT model</article-title>. <source>IEEE J. Selected Topics Appl. Earth Observ. Remote Sens.</source> <volume>8</volume>, <fpage>3172</fpage>&#x2013;<lpage>3182</lpage>.</citation></ref>
<ref id="B33">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wold</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Sj&#xf6;str&#xf6;m</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Eriksson</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2001</year>). <article-title>PLS-regression: a basic tool of chemometrics</article-title>. <source>Chemometrics Intelligent Lab. Syst.</source> <volume>58</volume>, <fpage>109</fpage>&#x2013;<lpage>130</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/S0169-7439(01)00155-1</pub-id>
</citation></ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wright</surname> <given-names>I. J.</given-names>
</name>
<name>
<surname>Reich</surname> <given-names>P. B.</given-names>
</name>
<name>
<surname>Westoby</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Ackerly</surname> <given-names>D. D.</given-names>
</name>
<name>
<surname>Baruch</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Bongers</surname> <given-names>F.</given-names>
</name>
<etal/>
</person-group>. (<year>2004</year>). <article-title>The worldwide leaf economics spectrum</article-title>. <source>Nature</source> <volume>428</volume>, <fpage>821</fpage>&#x2013;<lpage>827</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/nature02403</pub-id>, PMID: <pub-id pub-id-type="pmid">15103368</pub-id></citation></ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yin</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Multi-temporal UAV imaging-based mapping of chlorophyll content in potato crop</article-title>. <source>PFG J. Photogramm. Remote Sens. Geoinform. Sci.</source> <volume>91</volume>, <fpage>91</fpage>&#x2013;<lpage>106</lpage>.</citation></ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yue</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>K.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Chlorophyll content estimation of <italic>Ginkgo</italic> seedlings based on deep learning and hyperspectral imagery</article-title>. <source>Forests</source> <volume>15</volume>, <fpage>2010</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/f15112010</pub-id>
</citation></ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Bao</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Cross-dataset learning for age estimation</article-title>. <source>IEEE Access</source> <volume>10</volume>, <fpage>24048</fpage>&#x2013;<lpage>24055</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2022.3154403</pub-id>
</citation></ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Cao</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2025</year>a). <article-title>Visible-infrared person re-identification with real-world label noise</article-title>. <source>IEEE Trans. Circuits Syst. Video Technol</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TCSVT.2025.3526449</pub-id>
</citation></ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Shi</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Mu</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Deep-IRTarget: An automatic target detector in infrared imagery using dual-domain feature extraction and allocation</article-title>. <source>IEEE Trans. Multimedia</source> <volume>24</volume>, <fpage>1735</fpage>&#x2013;<lpage>1749</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TMM.2021.3070138</pub-id>
</citation></ref>
<ref id="B40">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Q.</given-names>
</name>
<etal/>
</person-group>. (<year>2025</year>b). <article-title>A benchmark and frequency compression method for infrared few-shot object detection</article-title>. <source>IEEE Trans. Geosci. Remote Sens</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TGRS.2025.3540945</pub-id>
</citation></ref>
</ref-list>
</back>
</article>