<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="review-article" dtd-version="1.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2026.1731852</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Review</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Advancements in 3D field-crop phenotyping using point clouds: a comparative review of sensor technology, target traits, and challenges under controlled and field conditions</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Omia</surname><given-names>Emmanuel</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2852227/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Park</surname><given-names>Eunsung</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3358776/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Semyalo</surname><given-names>Dennis</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2715400/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Joshi</surname><given-names>Rahul</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/1963289/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Cho</surname><given-names>Byoung-Kwan</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>*</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/1565711/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>Department of Smart Agriculture Systems Machinery Engineering, Chungnam National University</institution>, <city>Daejeon</city>,&#xa0;<country country="KR">Republic of Korea</country></aff>
<aff id="aff2"><label>2</label><institution>Department of Smart Agriculture Systems, Chungnam National University</institution>, <city>Daejeon</city>,&#xa0;<country country="KR">Republic of Korea</country></aff>
<author-notes>
<corresp id="c001"><label>*</label>Correspondence: Byoung-Kwan Cho, <email xlink:href="mailto:chobk@cnu.ac.kr">chobk@cnu.ac.kr</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-06">
<day>06</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2026</year>
</pub-date>
<volume>17</volume>
<elocation-id>1731852</elocation-id>
<history>
<date date-type="received">
<day>27</day>
<month>10</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>12</day>
<month>01</month>
<year>2026</year>
</date>
<date date-type="rev-recd">
<day>24</day>
<month>12</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2026 Omia, Park, Semyalo, Joshi and Cho.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Omia, Park, Semyalo, Joshi and Cho</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-06">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>3D phenotyping refers to the quantitative characterization of a plant&#x2019;s structural and morphological traits in three-dimensional space, allowing for a detailed analysis of plant architecture and growth patterns. In recent years, rapid advancements in non-destructive, high-throughput 3D imaging technologies have enabled the precise measurement of these traits. Initially focused on single-plant traits under controlled conditions, the field has now expanded towards robust applications in real-world field environments, enabling large-scale analyses of plant canopies and complex structures. This study focuses on the recent advancements in 3D crop phenotyping using point cloud technologies. It compares sensor technology and its application in controlled environments (Chamber-Crop Phenotyping, CCP) and field conditions (Field-Crop Phenotyping, FCP). Technologies such as Multiview stereo (MVS) reconstruction, LiDAR, and laser triangulation have enhanced plant phenomics by enabling high-throughput, non-destructive measurements of key traits such as canopy structure, leaf area, and stem diameter. This review highlights the strengths of the CCP, where environmental variables and flexibility are tightly controlled, facilitating precise trait measurement, and contrasts it with the challenges of the FCP, where unpredictable factors, such as occlusion, wind, light variability, and terrain complexity, complicate data acquisition. Various sensor platforms, including ground-based robotic systems and unmanned aerial vehicles (UAVs), have been discussed regarding their ability to overcome occlusion and limited sensor range in real-world conditions. The need to transition these technologies from laboratory environments to real-world agricultural applications is emphasized, highlighting their potential to improve crop management and plant breeding through accurate phenotypic trait extraction. 
Finally, current research gaps and future directions for integrating advanced sensor platforms and analytical techniques in both CCP and FCP settings are identified, emphasizing the need to enhance the scalability and robustness of 3D phenotyping for field applications.</p>
</abstract>
<kwd-group>
<kwd>3D crop phenotyping</kwd>
<kwd>laser triangulation</kwd>
<kwd>plant phenomics</kwd>
<kwd>precision agriculture</kwd>
<kwd>stereo vision</kwd>
<kwd>structured light</kwd>
<kwd>terrestrial laser</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. This work was supported by the National Research Foundation of Korea (NRF) grant funded by the Korea government (MSIT) (No. RS-2024-00459812).</funding-statement>
</funding-group>
<counts>
<fig-count count="9"/>
<table-count count="6"/>
<equation-count count="0"/>
<ref-count count="169"/>
<page-count count="37"/>
<word-count count="26055"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Sustainable and Intelligent Phytoprotection</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Geometry is fundamental to plant phenotyping, enabling the detailed analysis of plants&#x2019; morphological and structural traits in three-dimensional (3D) space (<xref ref-type="bibr" rid="B12">Bucksch et&#xa0;al., 2017</xref>). In recent decades, 3D point clouds have gained significant attention, particularly in the context of the Fourth Industrial Revolution, where they play a pivotal role in robotic vision perception and navigation (<xref ref-type="bibr" rid="B81">Lin and Juang, 2023</xref>). Today, almost every robotic system uses point clouds to interpret the complex geometries of its surroundings, facilitating safe and efficient navigation in human environments. This advancement has spurred the adoption of 3D point cloud technology in various fields, including agriculture (<xref ref-type="bibr" rid="B57">Jin et&#xa0;al., 2021</xref>). With the global population on the rise, there is an unprecedented demand for agricultural productivity and sustainable management of natural resources. Consequently, integrating 3D point cloud technology into traditional farming practices is becoming increasingly essential for boosting food production while minimizing the environmental impact of agriculture (<xref ref-type="bibr" rid="B3">Araus et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B143">Vandenberghe et&#xa0;al., 2018</xref>).</p>
<p>In agriculture, point cloud technology has been increasingly adopted for diverse applications, such as autonomous machinery navigation and plant phenotyping (<xref ref-type="bibr" rid="B50">Iqbal et&#xa0;al., 2020a</xref>). This study focuses on the latter and explores how point clouds transform modern plant phenomics. Plant phenomics aims to extract qualitative and quantitative traits to enhance and characterize plant phenotypes. Recent advancements in point cloud technology, combined with sophisticated data processing and analysis techniques, are poised to revolutionize plant phenotyping by improving the precision and ease of geometric-trait extraction. These high-resolution data are invaluable for breeders evaluating genotype performance in breeding plots and for farmers seeking precise crop management solutions. Researchers have actively conducted experimental studies to develop point-cloud-based methods, focusing on data collection platforms and analysis algorithms. However, most of these studies have been conducted in controlled growth environments, such as laboratories and greenhouses, rather than in open or semi-open field conditions (<xref ref-type="bibr" rid="B71">Langstroff et&#xa0;al., 2022</xref>). This is mainly due to the recommendation that initial research and development be conducted in controlled settings to minimize variables and ensure reproducibility. However, real-world production environments present more complex challenges, requiring adaptations and modifications to these methods for successful field deployment (<xref ref-type="bibr" rid="B2">Araus and Cairns, 2014</xref>). Consequently, a comparative overview of advancements in 3D field crop phenotyping versus those conducted in controlled environments is essential.</p>
<p>To clarify, in this survey, phenotyping in controlled growth environments, such as laboratories and experimental greenhouses, is defined as Chamber-Crop Phenotyping (CCP), whereas phenotyping in large open fields or production greenhouses is referred to as Field-Crop Phenotyping (FCP). The key distinction between these categories lies in environmental variability and flexibility regarding the movement and/or rotation of individual plants or groups. CCP environments allow for greater environment control and flexibility, as plants are often grown in pots that can be easily repositioned. However, in FCP environments, crops are typically planted directly in the soil, making it cumbersome to manipulate individual plants, in addition to the variability in environmental variables. Nevertheless, sensor mobility remains comparable across both environments, except for the ability to deploy sensors at high altitudes (&gt;10&#xa0;m) in the FCP, which is typically not feasible in CCP settings. Planting patterns and density are also critical factors in categorizing phenotyping environments, as they influence the potential for occlusion and overlap. Crops planted under optimal production conditions, whether in open fields or greenhouses, are classified as FCP, as they prioritize yield and reflect real-world farming scenarios. Conversely, if plants are deliberately spaced to facilitate easy data collection at the expense of yield, they are categorized as CCP, even if grown in open-field breeding plots. Understanding these distinctions is crucial because planting density and pattern can significantly impact the range of measurable phenotypic traits and the design of analytical algorithms.</p>
<p>In CCP settings, 3D point cloud measuring techniques offer unparalleled precision and control over environmental variables. Researchers can easily manipulate a plant&#x2019;s positioning and/or rotation, lighting, and other parameters to optimize data acquisition. As a result, techniques such as structured light scanning (<xref ref-type="bibr" rid="B120">Rosell-Polo et&#xa0;al., 2015</xref>), laser scanning (<xref ref-type="bibr" rid="B57">Jin et&#xa0;al., 2021</xref>), and photogrammetry (<xref ref-type="bibr" rid="B166">Zhang and Zhang, 2018</xref>) have flourished, enabling high-resolution reconstructions of plant morphology with exceptional accuracy. However, the confinement of experiments to controlled environments may inadvertently limit the generalizability of these results to real-field production scenarios (<xref ref-type="bibr" rid="B71">Langstroff et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B115">Polder and Hofstee, 2014</xref>). In contrast, FCP presents many challenges, including variability in environmental conditions, illumination, terrain, occlusions, and extensive plant populations. These factors introduce complexities not encountered in laboratory settings, necessitating the adaptation of 3D point cloud techniques to accommodate real-world conditions. Additionally, if the sensor is mounted on a mobile agricultural machine, it must withstand mechanical vibrations and shocks in addition to atmospheric distortions such as moisture, dust, varying temperatures, and bright sunlight (<xref ref-type="bibr" rid="B103">Ninomiya, 2022</xref>). While the potential benefits of field-based phenotyping are substantial, the practical implementation of 3D imaging technologies in such environments requires careful consideration of these challenges.</p>
<p>This survey provides a comprehensive comparative review of 3D phenotyping techniques under both CCP and FCP conditions and evaluates their suitability across varying environments. By synthesizing the existing literature, the strengths, limitations, and research gaps of 3D point cloud measurement techniques in both settings are highlighted, focusing on transitioning innovations from controlled environments to real-world agricultural applications. This emphasis on field-based research and development is intended to facilitate the integration of advanced phenotyping technologies into crop production. This review explores advancements in 3D sensing and measurement techniques, the carrier platforms used across environments, and key phenotypic traits for genotype evaluation and precision crop management, concluding with a discussion of the prospects for 3D crop phenotyping using point cloud technology. For details on the processing and analysis techniques, a recent review by <xref ref-type="bibr" rid="B43">Harandi et&#xa0;al. (2023)</xref> is recommended.</p>
</sec>
<sec id="s2">
<label>2</label>
<title>3D vision techniques used in phenotyping</title>
<p>Obtaining precise, high-quality 3D measurements of plant organs and/or structures, such as leaves, stems, and canopies, also known as 3D high-throughput plant phenotyping (HTPP), relies largely on the point cloud quality used in the process. 3D point cloud measurement techniques have gained significant attention in plant and crop phenotyping (<xref ref-type="bibr" rid="B3">Araus et&#xa0;al., 2022</xref>). This growing interest stems from the potential of 3D point cloud technologies to provide detailed spatial information on plant structures, facilitating comprehensive analysis and characterization (<xref ref-type="bibr" rid="B43">Harandi et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B57">Jin et&#xa0;al., 2021</xref>). However, while considerable research has been devoted to achieving more measurable traits under laboratory-based setups, the translation of these techniques to actual field conditions remains relatively underexplored and is limited to a few traits (<xref ref-type="bibr" rid="B71">Langstroff et&#xa0;al., 2022</xref>). This section aims to bridge this gap by conducting a comparative literature study focused on 3D plant and/or crop phenotyping techniques, particularly clarifying the strengths and weaknesses of various measurement techniques under both CCP and FCP environments. This section excludes techniques for anatomical-level phenotyping aimed at retrieving 3D internal structures, such as Magnetic Resonance Imaging (MRI), Positron Emission Tomography (PET), and Computed Tomography (CT), and only concentrates on external morphology extraction techniques, such as laser scanners and photogrammetry.</p>
<p>To facilitate a systematic comparison across technologies and environments, this review adopts a multidimensional evaluation framework. Following the comparative approach outlined by <xref ref-type="bibr" rid="B110">Paulus (2019)</xref>, each 3D sensing technology was evaluated based on six key criteria: (i) achievable accuracy and resolution, (ii) measurable phenotypic traits, (iii) data acquisition throughput, (iv) platform compatibility and constraints, (v) dominant error sources, and (vi) environmental robustness for CCP versus FCP deployment. This framework enables a structured comparison of technologies that differ fundamentally in their operational principles but serve similar phenotyping objectives. <xref ref-type="table" rid="T1"><bold>Table&#xa0;1</bold></xref> summarizes these characteristics across all the technologies discussed in this review, providing a consolidated reference for researchers and practitioners selecting appropriate sensing solutions for their specific phenotyping requirements. The following subsections detail each technology according to this framework, with explicit attention to the comparative performance under controlled and field conditions.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Comprehensive comparison of 3D sensing technologies for plant phenotyping under CCP and FCP conditions.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Technology</th>
<th valign="middle" align="center">Environment</th>
<th valign="middle" align="center">Achievable accuracy</th>
<th valign="middle" align="center">Key measurable traits</th>
<th valign="middle" align="center">Throughput</th>
<th valign="middle" align="center">Platform compatibility</th>
<th valign="middle" align="center">Dominant error sources</th>
<th valign="middle" align="center">Representative Studies</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" rowspan="2" align="center">Laser Triangulation (LTS)</td>
<td valign="middle" align="center">CCP</td>
<td valign="middle" align="left">14 &#xb5;m&#x2013;45 &#xb5;m resolution; R&#xb2; &gt;0.85 for leaf area</td>
<td valign="middle" align="left">Petal thickness, leaf area, plant volume, ear volume, organ-level geometry</td>
<td valign="middle" align="left">Low (single plant)</td>
<td valign="middle" align="left">Articulated arm, turntable, fixed mount</td>
<td valign="middle" align="left">Chlorophyll absorption at 660 nm, leaf translucency, edge effects</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B73">Lee et&#xa0;al. (2013)</xref>; <xref ref-type="bibr" rid="B111">Paulus et&#xa0;al. (2013)</xref>; <xref ref-type="bibr" rid="B15">Cai et&#xa0;al. (2020)</xref></td>
</tr>
<tr>
<td valign="middle" align="center">FCP</td>
<td valign="middle" align="left">cm-level; R&#xb2; = 0.80&#x2013;0.99 for canopy traits</td>
<td valign="middle" align="left">Canopy height, biomass density, leaf area index</td>
<td valign="middle" align="left">Medium (gantry-based)</td>
<td valign="middle" align="left">Gantry systems, mobile platforms (range 0.8 m&#x2013;2.4 m)</td>
<td valign="middle" align="left">Limited range, occlusion, dust, platform vibration</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B28">Ehlert et&#xa0;al. (2008)</xref>; <xref ref-type="bibr" rid="B142">Vadez et&#xa0;al. (2015)</xref>; <xref ref-type="bibr" rid="B146">Virlet et&#xa0;al. (2016)</xref></td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">Multiview Stereo (MVS)</td>
<td valign="middle" align="center">CCP</td>
<td valign="middle" align="left">mm-level; R&#xb2; = 0.87&#x2013;0.99 for height/leaf area</td>
<td valign="middle" align="left">Plant height, leaf area, stem diameter, 3D architecture</td>
<td valign="middle" align="left">Medium (1&#x2013;2 min/plant)</td>
<td valign="middle" align="left">Multi-camera rig, turntable, robotic arm</td>
<td valign="middle" align="left">Texture-less regions, processing time, calibration errors</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B70">Kumar et&#xa0;al. (2014)</xref>; <xref ref-type="bibr" rid="B119">Rose et&#xa0;al. (2015)</xref>; <xref ref-type="bibr" rid="B154">Wu et&#xa0;al. (2020)</xref></td>
</tr>
<tr>
<td valign="middle" align="center">FCP</td>
<td valign="middle" align="left">cm-level; R&#xb2; = 0.78&#x2013;0.99 for canopy traits</td>
<td valign="middle" align="left">Canopy height, plot-level biomass, leaf angle, row structure</td>
<td valign="middle" align="left">High (hectares/hour via UAV)</td>
<td valign="middle" align="left">UAV, ground robot, handheld</td>
<td valign="middle" align="left">Wind-induced motion, variable lighting, dense canopy occlusion</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B68">Klodt et&#xa0;al. (2015)</xref>; <xref ref-type="bibr" rid="B7">Bao et&#xa0;al. (2016)</xref>; <xref ref-type="bibr" rid="B155">Xiang et&#xa0;al. (2023)</xref></td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">Time-of-Flight (ToF)</td>
<td valign="middle" align="center">CCP</td>
<td valign="middle" align="left">cm-level; 10%&#x2013;14% mean error for canopy traits</td>
<td valign="middle" align="left">Plant height, stem diameter, canopy volume, dynamic growth</td>
<td valign="middle" align="left">High (real-time)</td>
<td valign="middle" align="left">Ground robot, UAV, tractor-mounted</td>
<td valign="middle" align="left">Sunlight saturation, limited range, motion artifacts</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B122">Ruckelshausen et&#xa0;al. (2009)</xref>; <xref ref-type="bibr" rid="B160">Young et&#xa0;al. (2019)</xref>; <xref ref-type="bibr" rid="B31">Fan et&#xa0;al. (2022)</xref></td>
</tr>
<tr>
<td valign="middle" align="center">FCP</td>
<td valign="middle" align="left">cm-level; 10%&#x2013;14% mean error for canopy traits</td>
<td valign="middle" align="left">Plant height, stem diameter, canopy volume, dynamic growth</td>
<td valign="middle" align="left">High (real-time)</td>
<td valign="middle" align="left">Ground robot, UAV, tractor-mounted</td>
<td valign="middle" align="left">Sunlight saturation, limited range, motion artifacts</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B122">Ruckelshausen et&#xa0;al. (2009)</xref>; <xref ref-type="bibr" rid="B160">Young et&#xa0;al. (2019)</xref>; <xref ref-type="bibr" rid="B31">Fan et&#xa0;al. (2022)</xref></td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">Terrestrial Laser Scanning (TLS)</td>
<td valign="middle" align="center">CCP</td>
<td valign="middle" align="left">sub-mm to mm; R&#xb2; &gt;0.90 for organ traits</td>
<td valign="middle" align="left">Detailed plant architecture, leaf traits, organ segmentation</td>
<td valign="middle" align="left">Low-Medium (static scanning)</td>
<td valign="middle" align="left">Tripod, fixed post</td>
<td valign="middle" align="left">Occlusion requiring multiple positions, cost</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B149">Wang et&#xa0;al. (2018)</xref>; <xref ref-type="bibr" rid="B107">Panjvani et&#xa0;al. (2019)</xref>; <xref ref-type="bibr" rid="B108">Patel et&#xa0;al. (2023)</xref></td>
</tr>
<tr>
<td valign="middle" align="center">FCP</td>
<td valign="middle" align="left">cm-level; RMSE 5 cm&#x2013;6 cm for plant height</td>
<td valign="middle" align="left">Canopy height, biomass, structural metrics, plot-level traits</td>
<td valign="middle" align="left">High (mobile/backpack)</td>
<td valign="middle" align="left">Tripod, vehicle-mounted, backpack, UAV</td>
<td valign="middle" align="left">Ground-canopy separation, wind sensitivity, point density variation</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B34">Friedli et&#xa0;al. (2016)</xref>; <xref ref-type="bibr" rid="B20">Deery et&#xa0;al. (2020)</xref>; <xref ref-type="bibr" rid="B106">Pan et&#xa0;al. (2022)</xref></td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">Structured Light (SL)</td>
<td valign="middle" align="center">CCP</td>
<td valign="middle" align="left">mm-level; &lt;13&#xa0;mm error; R&#xb2; &gt;0.9 for leaf area</td>
<td valign="middle" align="left">Leaf area, stress response, high-resolution surface geometry</td>
<td valign="middle" align="left">Medium (single plant)</td>
<td valign="middle" align="left">Fixed setup, controlled lighting required</td>
<td valign="middle" align="left">Ambient light interference, calibration sensitivity</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B98">Nam et&#xa0;al. (2014)</xref>; <xref ref-type="bibr" rid="B101">Nguyen et&#xa0;al. (2015</xref>, <xref ref-type="bibr" rid="B100">2016a)</xref></td>
</tr>
<tr>
<td valign="middle" align="center">FCP</td>
<td valign="middle" align="left">cm-level; R&#xb2; = 0.99 under low ambient light</td>
<td valign="middle" align="left">Canopy structure (low-light conditions only)</td>
<td valign="middle" align="left">Low (dawn/dusk/night only)</td>
<td valign="middle" align="left">Mobile platform (restricted operation)</td>
<td valign="middle" align="left">High ambient light severely degrades performance</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B120">Rosell-Polo et&#xa0;al. (2015)</xref></td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">Light Field (LF)</td>
<td valign="middle" align="center">CCP</td>
<td valign="middle" align="left">Limited depth range (10 cm&#x2013;50 cm); qualitative</td>
<td valign="middle" align="left">Stem and leaf morphology, post-capture refocusing</td>
<td valign="middle" align="left">Low (large files, heavy processing)</td>
<td valign="middle" align="left">Fixed mount, careful calibration required</td>
<td valign="middle" align="left">Limited FOV, depth resolution, calibration complexity</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B115">Polder and Hofstee (2014)</xref></td>
</tr>
<tr>
<td valign="middle" align="center">FCP</td>
<td valign="middle" align="left">Limited; 4.33 average deviation error for height</td>
<td valign="middle" align="left">Plant height (short-range only)</td>
<td valign="middle" align="left">Very Low</td>
<td valign="middle" align="left">Ground robot (experimental)</td>
<td valign="middle" align="left">Short effective range, computational requirements, cost</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B127">Schima et&#xa0;al. (2016)</xref></td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>Accuracy values represent typical ranges reported in literature; actual performance varies with sensor model, plant species, and experimental conditions. R&#xb2; values indicate correlation with manual/reference measurements. CCP, Chamber-Crop Phenotyping; FCP, Field-Crop Phenotyping. Throughput categories: Low (&lt;10 plants/h), Medium (10&#x2013;100 plants/h), High (&gt;100 plants/hour or continuous field coverage).</p></fn>
</table-wrap-foot>
</table-wrap>
<sec id="s2_1">
<label>2.1</label>
<title>Laser triangulation</title>
<p>One of the most employed principles in low-cost 3D measurements is laser triangulation scanning (LTS) owing to its straightforwardness and robustness. The design of the scanner relies on basic trigonometry. Laser triangulation employs non-contact optical methods to capture accurate 3D information of an object or its surface. The core principle of LTS is the projection of a laser beam onto the target surface at a predetermined angle relative to an imaging sensor or camera (<xref ref-type="fig" rid="f1"><bold>Figure&#xa0;1</bold></xref>). As the reflected laser light interacts with the surface, intricate details are captured and analyzed, enabling accurate spatial 3D reconstructions. This method is prized for its high precision (in &#xb5;m) and low cost, making it an attractive option for phenotyping plants. <xref ref-type="bibr" rid="B95">Malhotra et&#xa0;al. (2011)</xref> and <xref ref-type="bibr" rid="B128">Schlarp et&#xa0;al. (2019)</xref> detailed the principles and algorithms underpinning laser triangulation, emphasizing its capability to achieve high-resolution 3D profiling with an accuracy of approximately 15 &#x3bc;m, as noted by <xref ref-type="bibr" rid="B26">Dupuis and Kuhlmann (2014)</xref>. Its rapid data acquisition and non-destructive nature make laser triangulation ideal for high-throughput phenotyping platforms, facilitating continuous monitoring of plant development and stress responses.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Laser triangulation measurement principle: A laser beam (red) is projected onto the target surface, and the reflection angle (&#x3b8;) is determined with respect to the imaging sensor.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1731852-g001.tif">
<alt-text content-type="machine-generated">Diagram showing a laser emitter and imaging sensor system. The laser emits a red beam through a lens towards a surface with green plants. An imaging sensor captures reflected light. The diagram includes measurement labels and a three-axis coordinate system.</alt-text>
</graphic></fig>
<p>For CCP applications, LTS has primarily been utilized to obtain precise 3D measurements of various plant traits, facilitating the analysis of individual plant growth, health, and other phenotypic characteristics. One of the key applications of laser triangulation in plant phenotyping is the accurate measurement of plant volumes and surface features. For instance, the 3D reconstruction of plants, such as potatoes, can be achieved through laser triangulation, allowing for precise volume measurements. This is particularly beneficial for phenotyping tasks, such as grading potatoes based on their size and weight, which are directly related to their volume (<xref ref-type="bibr" rid="B15">Cai et&#xa0;al., 2020</xref>). According to <xref ref-type="bibr" rid="B15">Cai et&#xa0;al. (2020)</xref>, their process involved using a monocular camera and a line LTS laser to scan the surface of the potato and capture detailed coordinates that were used for 3D reconstruction and volume calculation.</p>
<p>Another important application is the measurement of leaf and petal thickness, which are critical indicators of plant health. Traditional contact-based methods often damage the delicate leaf and petal surfaces. In contrast, laser scanning provides a non-contact solution that can accurately measure these parameters in real time. For example, a device using dual laser triangulation was used to measure the thickness of Phalaenopsis petals with a resolution of 2 &#x3bc;m/pixel and a total measurement uncertainty of less than 16 &#x3bc;m (<xref ref-type="bibr" rid="B73">Lee et&#xa0;al., 2013</xref>). Their approach employed a cubic spline technique to fit the measured points on the petal surface, ensuring precise thickness calculation.</p>
<p>Moreover, laser triangulation is employed in high-throughput CCP to evaluate the geometric parameters of plants, such as the shapes and sizes of leaves and other organs. It allows the classification and parameterization of plant parts by analyzing 3D point clouds generated from laser scans. These point clouds can be processed to extract surface features and differentiate between various plant organs. The high automation and accuracy of this approach make it suitable for large-scale phenotyping studies, where quick and reliable data acquisition is essential (<xref ref-type="bibr" rid="B111">Paulus et&#xa0;al., 2013</xref>).</p>
<p>Although to a lesser extent, for FCP applications, several studies have highlighted the effectiveness of LTS in capturing detailed phenotypic data essential for understanding plant responses in their natural environments, mostly presented as integrated phenotyping platforms. One notable application is the LeasyScan platform (<xref ref-type="bibr" rid="B142">Vadez et&#xa0;al., 2015</xref>), which combines 3D LTS imaging with lysimetric measurements to assess canopy traits affecting water use, such as leaf area and transpiration rate. This platform continuously captures leaf area development and integrates gravimetric data, providing high-throughput and precise measurements that are critical for drought adaptation studies. Under field conditions, the LeasyScan platform achieved strong correlations between scanned and observed leaf area data (R&#xb2; = 0.80&#x2013;0.99 across various crops), with a measurement precision within 5% of the reference values (<xref ref-type="bibr" rid="B142">Vadez et&#xa0;al., 2015</xref>). Similarly, the Field Scanalyzer platform reported plant height estimation accuracy with an RMSE of 1.88&#xa0;cm and R&#xb2; = 0.97 when validated against manual measurements (<xref ref-type="bibr" rid="B146">Virlet et&#xa0;al., 2016</xref>). These results demonstrate the potential of LTS-based gantry systems to achieve centimeter-level accuracy in field phenotyping when the sensor-to-canopy distance is controlled.</p>
<p>Similar to LeasyScan, the Field Scanalyzer (<xref ref-type="bibr" rid="B146">Virlet et&#xa0;al., 2016</xref>) is an automated robotic field phenotyping platform. It employs a comprehensive sensor array, including dual 3D LTS scanners, to monitor crop performance at high temporal and spatial resolutions. This platform facilitates detailed measurements of canopy development and growth stages throughout the crop life cycle. By integrating multiple sensors, the Field Scanalyzer provides a robust dataset that supports the identification of key growth stages and specific growth measurements, contributing to more precise crop monitoring and breeding efforts. <xref ref-type="bibr" rid="B28">Ehlert et&#xa0;al. (2008)</xref> further demonstrated that vehicle-mounted laser scanners could estimate crop biomass density in field trials with R&#xb2; values ranging from 0.93 to 0.99, though accuracy decreased at plant densities exceeding 200 plants/m&#xb2; due to mutual shading and occlusion effects.</p>
<p>Although laser triangulation offers significant advantages for 3D phenotyping of plants, several studies have highlighted its limitations. These limitations can be inherent in the hardware capability or originate from the interactions between the laser system, plant characteristics, and specific measurement environment challenges. The most well-known inherent hardware limitation of LTS is short range (usually within a few tens to hundreds of centimeters), such as LASE ODS 1600 HT 2 (Danish company, LASE<sup>&#xae;</sup>) with a measuring range of 0.80 m&#x2013;2.4 m (<xref ref-type="bibr" rid="B28">Ehlert et&#xa0;al., 2008</xref>). <xref ref-type="bibr" rid="B112">Paulus et&#xa0;al. (2014)</xref> investigated the influence of species, leaf chlorophyll content, and sensor settings on the accuracy of a 660 nm active laser triangulation scanning device. They found that the accuracy of the surface images varied significantly with leaf chlorophyll concentration and sensor exposure time. For example, the leaves of <italic>Ficus benjamina</italic> with low chlorophyll concentrations and long sensor exposure times yielded inaccurate surface images. Conversely, the rough, waxy surface of leeks (<italic>Allium porrum</italic>) can be accurately imaged using very low exposure times. However, longer exposure times result in penetration and multiple refractions, preventing accurate surface imaging. These findings suggest that plant properties and sensor settings must be carefully considered to achieve high accuracy in laser imaging for tasks such as monitoring plant growth and assessing responses to water stress.</p>
<p><xref ref-type="bibr" rid="B27">Dupuis et&#xa0;al. (2015)</xref> examined the impact of different leaf surface tissues on the accuracy of 3D laser triangulation measurement. They compared two triangulation-based 3D laser scanners with different wavelengths (658 nm red and 405 nm blue) and found that the intensity of reflection from backscattered laser rays provided valuable insights into both the geometric accuracy and physiological conditions of plants. The study revealed that red lasers showed high interpretability in terms of tissue composition, whereas blue lasers provided higher geometric accuracy. However, the interaction with leaf tissues and the resulting absorption of the laser can affect the measurement accuracy. The ability to identify plant diseases, such as powdery mildew, and analyze tissue composition and leaf senescence stages using intensity data was demonstrated. However, these interactions highlight the need for further refinement of laser triangulation to achieve precise plant phenotyping.</p>
<p><xref ref-type="bibr" rid="B66">Klapa and Mitka (2017)</xref> discussed the edge effect, a measurement error arising from the reflection of the laser beam on adjacent walls or its diffraction at the edges. This effect leads to incorrect positioning of points in space due to the averaging of measurements from multiple areas. This study presents case studies showing the discrepancy between the corner points in the models and the actual curved surface of the point clouds. This edge effect can significantly impact the quality and accuracy of measurements, emphasizing the need for improved methods to mitigate such errors in 2D and 3D laser scanning of plant structures.</p>
<p>In summary, laser triangulation demonstrated markedly different performance characteristics in CCP and FCP environments. Under controlled conditions, LTS achieves exceptional precision (14 &#xb5;m&#x2013;45 &#xb5;m resolution; <xref ref-type="bibr" rid="B26">Dupuis and Kuhlmann, 2014</xref>) and strong correlations with manual measurements (R&#xb2; = 0.85&#x2013;0.97 for morphological parameters; <xref ref-type="bibr" rid="B112">Paulus et&#xa0;al., 2014</xref>), making it well-suited for organ-level phenotyping of individual plants, including leaf area, petal thickness, and volumetric traits. However, these advantages are contingent upon the careful optimization of sensor exposure settings relative to leaf optical properties, particularly chlorophyll concentration and surface characteristics. In FCP settings, the utility of this technology is constrained by its limited operational range (typically 0.8 m&#x2013;2.4 m; <xref ref-type="bibr" rid="B28">Ehlert et&#xa0;al., 2008</xref>), susceptibility to environmental interference (dust, vibration), and challenges with canopy occlusion. Field applications have therefore focused predominantly on gantry-mounted systems (e.g., LeasyScan, Field Scanalyzer), where controlled sensor-to-plant distances can be maintained, achieving R&#xb2; values of 0.80&#x2013;0.99 for canopy-level traits, such as leaf area index and biomass density. The key trade-off between environments involves precision versus scalability: CCP enables micrometer-level accuracy on individual organs, whereas FCP deployments sacrifice fine-scale resolution for plot-level throughput under the constraint of fixed infrastructure requirements.</p>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>Multiview stereo reconstruction</title>
<p>Multiview stereo (MVS) reconstruction is a well-established technology that has demonstrated significant potential for 3D modeling of plants since the mid-1990s, when <xref ref-type="bibr" rid="B52">Ivanov et&#xa0;al. (1995)</xref> first obtained an aerial 3D reconstructed model of a maize canopy, enabling them to estimate leaf position and orientation and leaf area distribution. MVS takes advantage of multiple cameras positioned around the target plant to capture images from different viewpoints (<xref ref-type="fig" rid="f2"><bold>Figure&#xa0;2</bold></xref>), which are then processed to reconstruct a 3D point cloud representation of the plant structure. This technique offers several advantages over traditional 3D scanning methods, including the ability to capture data rapidly and the use of relatively low-cost camera equipment (<xref ref-type="bibr" rid="B100">Nguyen et&#xa0;al., 2016a</xref>; <xref ref-type="bibr" rid="B154">Wu et&#xa0;al., 2020</xref>), making it relatively affordable and easily scalable with minimal overhead. It is evident in the literature that MVS has been widely used for phenotyping single plants under CCP compared to multiple plants in open-field conditions (FCP). Several aspects of MVS have been investigated in these studies to ascertain its suitability and competitiveness with other approaches.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Multiview Stereo (MVS) Reconstruction: A series of images is captured from different viewpoints using multiple cameras around the target plant.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1731852-g002.tif">
<alt-text content-type="machine-generated">3D diagram showing a central plant on a square surface with purple x, y, and z axes. Surrounding it are eight gray panels labeled View_1 to View_8, each displaying stylized leaf shapes from different angles.</alt-text>
</graphic></fig>
<p>The most attractive aspect of MVS for CCP and FCP applications is its cost. This is because MVS often takes advantage of readily available consumer color cameras, which are passive and thus require no extra lighting system, except in some studies that explore the use of structured light in combination with stereo cameras (<xref ref-type="bibr" rid="B100">Nguyen et&#xa0;al., 2016a</xref>). <xref ref-type="bibr" rid="B89">Lou et&#xa0;al. (2014)</xref> investigated the accuracy of multiview stereo 3D reconstruction for cost-effective, non-destructive plant phenotyping.</p>
<p>The experiment was conducted under CCP conditions, and each plant was scanned individually. Their main contribution was the development of a dense 3D reconstruction method that excelled in producing accurate 3D point clouds of various plants while retaining colors, textures, and shapes, compared to the earlier proposed methods by <xref ref-type="bibr" rid="B35">Furukawa and Ponce (2009)</xref> and <xref ref-type="bibr" rid="B53">Jancosek and Pajdla (2011)</xref>. However, their approach still suffers from the effects of occlusions, texture-less regions, and blurred images, resulting in significant gaps or holes in the final 3D model of the plants. Additionally, the proposed method was relatively slow as the number of images to be processed increased. Similarly, <xref ref-type="bibr" rid="B79">Li et&#xa0;al. (2017)</xref>; <xref ref-type="bibr" rid="B121">Rossi et&#xa0;al. (2020)</xref>, and <xref ref-type="bibr" rid="B154">Wu et&#xa0;al. (2020)</xref> present related narratives around the cost-effectiveness of the MVS phenotyping approach under CCP conditions. Collectively, these studies highlight the significant cost advantages of MVS phenotyping systems, while maintaining high accuracy and efficiency. The first study demonstrated a low-cost, portable stereo vision system that utilized high-definition webcams costing less than $70 and a laptop, and employed advanced algorithms to achieve robust and accurate 3D imaging under varying illumination conditions. Similarly, the MVS-Pheno platform automates image capture and 3D reconstruction of maize shoots in the field, combining affordability with high-throughput efficiency and strong correlations with manual measurements. Finally, the evaluation of a platform using low-cost sensors and cameras showed high accuracy in extracting morphological traits for multiple crops, reinforcing the potential of MVS systems for high-resolution plant phenotyping at minimal cost. 
These studies illustrate how MVS technology democratizes access to advanced phenotyping, enabling broader agricultural research and practice adoption by leveraging affordable hardware and sophisticated algorithms, without compromising data quality.</p>
<p>Although cost-effectiveness has dramatically inspired the adoption of MVS for 3D plant phenotyping, there are some associated drawbacks that are inherent to this technology. One such drawback is the compromise in processing speed because MVS requires secondary algorithms to convert image pairs into 3D point clouds. This introduces a processing overhead and can potentially require advanced computing capabilities to run the underlying reconstruction algorithms in cases where high-definition images are captured. Studies such as that of <xref ref-type="bibr" rid="B70">Kumar et&#xa0;al. (2014)</xref> have attempted to mitigate this drawback under CCP conditions by designing an easy-to-use camera calibration for single-axis motion coupled with a visual hull algorithm (<xref ref-type="bibr" rid="B72">Laurentini, 1995</xref>) for 3D reconstruction. Their approach achieved significant improvement in retrieving phenotypic quality 3D volumetric reconstruction with an acquisition time of less than a minute per potted plant compared to the previous study by Nguyen et&#xa0;al. (2016), which demonstrated an acquisition time of up to 2&#xa0;min.</p>
<p>Second to the processing time required is the issue of the point cloud resolution/quality retrieved via MVS setups. Although MVS systems, particularly low-cost ones, can be limited in point cloud resolution due to factors such as camera quality, algorithmic challenges, baseline distance, environmental conditions, processing power, data coverage, and comparison to other high-resolution 3D imaging technologies such as LiDAR or structured light scanners, several studies have demonstrated that MVS can potentially attain high point resolution, especially when high-end hardware components coupled with complex algorithms are used (<xref ref-type="bibr" rid="B67">Klodt and Cremers, 2015</xref>; <xref ref-type="bibr" rid="B119">Rose et&#xa0;al., 2015</xref>). Moreover, <xref ref-type="bibr" rid="B119">Rose et&#xa0;al. (2015)</xref> demonstrated that the performance of the MVS system with high-end cameras competes favorably with the close-up triangulation line scanner Perceptron v5, which is superior in point accuracy and resolution of up to 14 &#xb5;m. However, this exponentially increases the cost and computational complexity of MVS systems, which is undesirable for large-scale and real-time applications. Nevertheless, low-cost MVS systems remain valuable for many applications because of their affordability and flexibility. By understanding and mitigating these limitations in future studies, the resolution and accuracy of low-cost MVS-generated point clouds can be significantly improved, making them suitable for a broader range of phenotyping tasks, even under field conditions (FCP).</p>
<p>Some earlier studies have expressed reservations about the suitability of LiDAR for plant phenotyping. For instance, <xref ref-type="bibr" rid="B88">Lou et&#xa0;al. (2015)</xref> stated that &#x201c;the 3D LASER/LIDAR scanner or the structured-light scanner (including Kinect sensor) do not work well on plants, especially on complex or even marginally occluded specimens or on small plants&#x201d; (p. 555). While this assessment lacked experimental validation in their study, it likely reflected the genuine limitations of the LiDAR technology available at that time. <xref ref-type="bibr" rid="B80">Lin (2015)</xref>, in a contemporaneous review, similarly acknowledged that &#x201c;the currently-available LiDAR forms cannot effectively support the development of the next-generation techniques of plant phenotyping,&#x201d; identifying the need for higher-density, full-waveform, and hyperspectral LiDAR variants then under development.</p>
<p>Several factors may have contributed to the challenges encountered with early LiDAR systems in plant phenotyping applications: (i) lower point densities in commercial terrestrial scanners circa 2010&#x2013;2015, which were insufficient to resolve fine plant structures such as thin stems, small leaves, and complex branching patterns; (ii) limited algorithmic development for plant-specific point cloud processing, as early applications drew primarily from forestry and surveying domains where target geometries differ substantially from agricultural crops; (iii) the high cost of research-grade LiDAR equipment, which restricted access and limited systematic evaluation across diverse plant architectures; and (iv) specific experimental conditions, as performance varies considerably with plant species, growth stage, and scanning geometry.</p>
<p>Subsequent technological advancements have substantially improved the applicability of LiDAR to plant phenotyping. Higher-density scanning, multi-return signal processing, and dedicated algorithms for plant structure analysis have enabled successful applications across diverse crops, achieving centimeter-level accuracy for canopy traits and supporting large-scale field phenotyping (<xref ref-type="bibr" rid="B57">Jin et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B168">Zhu et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B108">Patel et&#xa0;al., 2023</xref>). The development of mobile and backpack-mounted LiDAR systems has further enhanced accessibility and throughput, addressing previous concerns regarding the practicality of this technology for routine phenotyping applications. Thus, while early skepticism reflected the real limitations of the technology at that time, the current state of LiDAR-based phenotyping demonstrates that these challenges have been substantially overcome through continued sensor development and algorithmic innovation.</p>
<p>Another aspect of MVS systems is the possibility of stereo spectral imaging, which makes it easy to render spectral information in 3D space, allowing for 3D plant health analyses and characterizations. This advantage is particularly interesting for field crop monitoring (FCP); however, the present studies have demonstrated it under CCP conditions only. For instance, <xref ref-type="bibr" rid="B159">Yoon and Thai (2010)</xref> presented an efficient approach to a stereo spectral imaging system for plant health characterization using a tunable stereo camera to switch between the NIR-band and Red-band alongside un-filtered raw stereo images. They used raw stereo images to match and reconstruct the 3D model of the plant, while the NIR and red-filtered stereo images were used for NDVI computation. Additionally, NIR stereo images were used for foreground object segmentation because background clutter was better suppressed by the NIR filter. Similarly, <xref ref-type="bibr" rid="B125">Santos et&#xa0;al. (2015)</xref> used spectral clustering to automatically segment plant leaves in point clouds. More details on spectral clustering can be found in <xref ref-type="bibr" rid="B99">Ng et&#xa0;al. (2001)</xref>; <xref ref-type="bibr" rid="B130">Shi and Malik (2000)</xref>, and <xref ref-type="bibr" rid="B147">Von Luxburg (2007)</xref>.</p>
<p>Due to rapidly advancing hardware computational capabilities, MVS is increasingly integrated with robotic systems and advanced deep learning algorithms to improve plant phenotyping using camera-based approaches. The integration of MVS reconstruction with robotic platforms offers several key advantages for high-throughput plant phenotyping, including faster and more comprehensive data acquisition, reduced human labor, and the ability to capture 3D measurements of complex plant structures in their natural field environments (<xref ref-type="bibr" rid="B22">Dengyu et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B39">Gibbs et&#xa0;al., 2019</xref>). Similarly, the integration of deep learning techniques with MVS reconstruction has been shown to offer significant advantages for plant phenotyping applications. Deep-learning-based MVS approaches can automate critical tasks, such as feature extraction, cost volume regularization, and depth map inference, leading to more efficient and robust 3D plant model reconstruction compared to traditional MVS pipelines (<xref ref-type="bibr" rid="B37">Gao et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B158">Yang et&#xa0;al., 2024</xref>). These advancements facilitate high-throughput, precise, and nondestructive plant phenotyping measurements, which are key requirements for advancing modern agriculture and crop breeding efforts.</p>
<p>To this point, aspects and applications of MVS systems in plant phenotyping have been discussed, with most references focusing on studies conducted under CCP use cases. However, FCP often presents complex challenges, which makes it cumbersome to advance all possible CCP plant trait measurements to field conditions. The major limitation in the field is plant density, which results in heavy occlusion, making it challenging to image individual plants. For ground-based systems, researchers often extract target features on a row-plot basis instead of individual plants because of the severe occlusion caused by densely clustered leaves (<xref ref-type="bibr" rid="B7">Bao et&#xa0;al., 2016</xref>).</p>
<p>Additionally, plants in their natural habitat are fixed, so moving or rotating them is not an option to change their orientation. Therefore, coupled with the occlusion problem, the plant&#x2019;s natural environment often presents a complex environment for FCP application. Moreover, the fields are usually large and consist of plant populations ranging from several hundred to millions of individual plants per acre, depending on the species. These complexities and the abundance of species in the field remain an underexplored challenge for phenotyping plants in their natural habitat. Several approaches have been adopted to minimize the effects of phenotyping plants in their natural habitat, including the use of mobile ground-based platforms, such as <italic>Vinobot</italic> in <xref ref-type="bibr" rid="B7">Bao et&#xa0;al. (2016)</xref> to capture organ-level phenotypic traits located at the middle and bottom of the plant canopy, fixed imaging towers, such as <italic>vinoculer</italic>, also described in <xref ref-type="bibr" rid="B129">Shafiekhani et&#xa0;al. (2017)</xref>, and in some cases, UAVs (<xref ref-type="bibr" rid="B24">Di Gennaro and Matese, 2020</xref>) are employed to carry the imaging sensors to capture canopy-level information. Several other studies have also adopted robot-based platforms, such as the one designed by <xref ref-type="bibr" rid="B6">Bao et&#xa0;al. (2019a)</xref> to phenotype sorghum plant architecture, including plant height, stem diameter, leaf angle, leaf area, leaf number, and panicle size. Additional examples of robotics applications in field phenotyping can be found in the studies by <xref ref-type="bibr" rid="B55">Jay et&#xa0;al. (2014)</xref>; <xref ref-type="bibr" rid="B63">Kim et&#xa0;al. (2021)</xref>; <xref ref-type="bibr" rid="B133">Sodhi et&#xa0;al. (2017)</xref>, and <xref ref-type="bibr" rid="B155">Xiang et&#xa0;al. (2023)</xref>.</p>
<p>However, it is worth noting that occlusion is not always the major problem under field conditions, depending on the target crop species and growth stage. For instance, <xref ref-type="bibr" rid="B68">Klodt et&#xa0;al. (2015)</xref> used stereo reconstruction to estimate dense depth maps to distinguish grapevines in the foreground from other field plants in the background. Their objective was to correct the challenge of segmenting the foreground and background associated with RGB imaging of grapes in the vineyard.</p>
<p>Quantitative validation of MVS under field conditions revealed achievable accuracy levels that, while reduced compared to the CCP, remain suitable for many phenotyping applications. <xref ref-type="bibr" rid="B68">Klodt et&#xa0;al. (2015)</xref> reported RMSE values of approximately 3.0% for grapevine canopy volume estimation under field conditions. For cereal crops, UAV-based MVS achieves plant height estimation with R&#xb2; = 0.91&#x2013;0.98 and RMSE&#xa0;=&#xa0;2.6 cm&#x2013;9.0 cm, depending on flight altitude, camera specifications, and growth stage (<xref ref-type="bibr" rid="B93">Madec et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B46">Holman et&#xa0;al., 2016</xref>). <xref ref-type="bibr" rid="B63">Kim et&#xa0;al. (2021)</xref> demonstrated height estimation accuracy of R&#xb2; = 0.78&#x2013;0.84 for maize and sorghum under field conditions, with performance degrading at later growth stages due to increased canopy complexity. These studies indicate that MVS-based field phenotyping typically achieves centimeter-level accuracy for canopy-level traits, representing approximately one order of magnitude reduction in precision compared with controlled environment applications.</p>
<p>Furthermore, in the context of FCP, one of the challenges encountered is the impact of environmental factors, such as wind, which introduces a significant degree of uncertainty. Wind causes plants to move non-rigidly, resulting in dynamic and unpredictable motion. This complicates the application of traditional structure-from-motion (SfM) techniques, which generally rely on the assumption that the objects being analyzed are static or have minimal motion. Consequently, the inherent assumptions of these techniques are violated, making it difficult to accurately capture and analyze the crop structure in a natural field environment.</p>
<p>While most studies do not account for the effect of wind, some earlier studies, such as that of <xref ref-type="bibr" rid="B10">Biskup et&#xa0;al. (2007)</xref>, attempted to mitigate this issue by stereomicroscopy of plants under outdoor conditions. This study emphasized the importance of synchronously triggering cameras for outdoor measurements, noting that plants are highly susceptible to wind and that successful stereo matching requires a rigid scene. Their field measurements with soybeans under various wind conditions demonstrated that reconstruction remained reliable in moderate wind and with a moving canopy; however, it failed in stormy conditions. This highlights the need for advanced techniques and considerations in FCP to handle the complexities introduced by environmental factors such as wind.</p>
<p>Expanding on this, a later study by <xref ref-type="bibr" rid="B109">Paturkar et&#xa0;al. (2019)</xref> investigated the effect of wind on the stereo reconstruction of plants under outdoor conditions. Their analysis revealed several adverse scenarios that present challenges and require further investigation. One such scenario involves acquiring images of plants in windy conditions, where plant movement leads to numerous feature-matching errors, resulting in poor 3D models. Specifically, the resulting models lacked essential details in the stem area and included only partially reconstructed leaves. A potential solution is to detect and filter out images with inconsistent matches caused by wind.</p>
<p>Additionally, this study explored the impact of changing light conditions, such as those caused by moving clouds. They found that drastic changes in illumination during image capture led to 3D models missing critical information about the plant surface and leaves, resulting in blank patches. To mitigate this issue, they proposed preprocessing and normalizing the acquired images to reduce the effects of illumination changes. These findings underscore the importance of addressing environmental factors such as wind and variable lighting in FCP. Overall, the development of MVS-based 3D reconstruction has been an essential advancement in plant phenotyping, providing a means to efficiently capture the detailed architectural traits of both laboratory-level and field-grown crops.</p>
<p>In summary, MVS reconstruction exhibited distinct advantages and limitations in phenotyping environments. In CCP settings, MVS systems achieve millimeter-level accuracy (R&#xb2; = 0.87&#x2013;0.99 for height, leaf area, and stem diameter; <xref ref-type="bibr" rid="B119">Rose et&#xa0;al., 2015</xref>; <xref ref-type="bibr" rid="B154">Wu et&#xa0;al., 2020</xref>) while maintaining cost-effectiveness through the use of consumer-grade cameras. The ability to control lighting, eliminate wind effects, and rotate plants for complete coverage enables high-fidelity 3D reconstructions that are suitable for detailed architectural analysis. Processing time (1 min&#x2013;2 min per plant; <xref ref-type="bibr" rid="B70">Kumar et&#xa0;al., 2014</xref>) and computational demands represent the primary constraints, although these are increasingly mitigated by advances in GPU-accelerated algorithms. Under FCP conditions, MVS faces substantial challenges: wind-induced plant motion violates the static scene assumption underlying structure-from-motion algorithms (<xref ref-type="bibr" rid="B10">Biskup et&#xa0;al., 2007</xref>), variable illumination causes feature-matching failures (<xref ref-type="bibr" rid="B109">Paturkar et&#xa0;al., 2019</xref>), and dense canopy occlusion limits individual plant resolution. Consequently, FCP applications typically achieve centimeter-level accuracy (R&#xb2; = 0.78&#x2013;0.99 for canopy traits; <xref ref-type="bibr" rid="B68">Klodt et&#xa0;al., 2015</xref>; <xref ref-type="bibr" rid="B63">Kim et&#xa0;al., 2021</xref>) and focus on plot-level rather than organ-level phenotyping. The recurring trade-off involves data quality versus acquisition flexibility: CCP enables controlled, high-resolution imaging at the cost of ecological validity, whereas FCP captures field-relevant phenotypes with reduced geometric precision and increased susceptibility to environmental artifacts.</p>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>Time-of-flight cameras</title>
<p>Time-of-flight (ToF) cameras represent a cutting-edge imaging and distance measurement technology. Unlike traditional cameras that capture 2D images based on color and intensity, ToF cameras measure the time it takes for light to travel from the camera to the object and back. This allows them to create depth maps and 3D representations of the scene (<xref ref-type="bibr" rid="B40">Gokturk et&#xa0;al., 2004</xref>; <xref ref-type="bibr" rid="B62">Keller and Kolb, 2009</xref>). Two primary approaches are used in ToF systems, each offering unique advantages for different applications. Direct ToF (dToF) cameras emit a brief light pulse lasting only a few nanoseconds and directly measure the time delay between the emission of the light pulse and its reflection from an object, calculating the distance based on the speed of light. Indirect ToF (iToF) cameras, on the other hand, emit continuously modulated light pulses (diffuse laser illumination) and measure the phase shift in the frequency of the reflected light to determine the distance to an object (<xref ref-type="bibr" rid="B75">Li, 2014</xref>; <xref ref-type="bibr" rid="B105">Padmanabhan et&#xa0;al., 2019</xref>). This method is beneficial for measuring the entire scene of objects close to the camera and allows iToF cameras to achieve higher frame rates. ToF cameras offer several advantages, including high frame rates, real-time depth information capture, and robustness under various lighting conditions. These characteristics make them suitable for multiple applications, including gesture recognition, industrial automation, augmented reality, and robotics.</p>
<p>The versatility and precision of ToF cameras have spurred interest in their application in the agricultural industry. In particular, they hold significant potential for plant phenotyping by providing valuable data on plant structures, growth patterns, and health statuses. The depth information captured by ToF cameras allows researchers to create detailed 3D models of plants, enabling precise measurements of plant height, leaf area, and biomass.</p>
<p>Researchers have integrated ToF cameras into phenotyping platforms to automate the collection of morphological data. Several studies in the literature have explored the application of ToF cameras in plant phenotyping under CCP and FCP conditions, demonstrating their usefulness and addressing various challenges associated with depth imaging.</p>
<p>For CCP applications, <xref ref-type="bibr" rid="B134">Song et&#xa0;al. (2011)</xref> and <xref ref-type="bibr" rid="B1">Alenya et&#xa0;al. (2011)</xref> focused on enhancing depth estimation and 3D modeling by integrating ToF cameras with other imaging techniques. <xref ref-type="bibr" rid="B134">Song et&#xa0;al. (2011)</xref> combined stereo and ToF images to estimate dense depth maps for automated plant phenotyping. They developed a geometric approach to transform the ToF depth information for stereo imaging, focusing on challenging plant images captured in a glasshouse environment. Despite their success, they faced challenges with the reliability of ToF data under dynamic lighting conditions and the difficulty in obtaining accurate pixel-by-pixel depth data. <xref ref-type="bibr" rid="B1">Alenya et&#xa0;al. (2011)</xref>, on the other hand, used ToF cameras in conjunction with color data for robotic plant measurements. By combining hierarchical color segmentation with quadratic surface fitting using ToF depth data, they successfully interpolated depth maps that closely matched the original scenes. However, they encountered difficulties in accurately segmenting overlapping leaves and managing occlusions, which are common issues in dense plant canopies.</p>
<p>In a comparative analysis, <xref ref-type="bibr" rid="B61">Kazmi et&#xa0;al. (2014)</xref> examined the performance of ToF cameras and stereo vision sensors under various illumination conditions. They tested three ToF cameras (PMD CamBoard, CamCube&#x2014;pmd Group of Companies, Siegen, Germany, and SwissRanger SR4000&#x2014;MESA Imaging AG, Technoparkstrasse 1, 8005 Zurich) against stereo correspondence algorithms, assessing their efficacy in indoor and outdoor settings. They found that ToF cameras had varying performances based on the lighting conditions, with the PMD CamCube excelling in sunlight. Nonetheless, ToF cameras struggled with ambient light interference, low resolution, and limited range. <xref ref-type="bibr" rid="B61">Kazmi et&#xa0;al. (2014)</xref> also proposed methods to enhance the dynamic range of ToF cameras, highlighting the strengths and limitations of both technologies in depth imaging. This study underscores the need for improved algorithms to mitigate the effects of changing light conditions and enhance depth accuracy.</p>
<p>Focusing on low-cost solutions, <xref ref-type="bibr" rid="B16">Cao et&#xa0;al. (2017)</xref> developed a ToF-based depth imaging system for phenotyping plants, explicitly targeting branch and seedpod detection. Using ToF cameras to capture 3D videos and images, they created 3D models to estimate the plant characteristics. Their findings underscored the potential of low-cost ToF cameras for efficient and high-throughput plant phenotyping, particularly for estimating biomass and crop yield. However, they noted challenges in processing large volumes of data and ensuring consistent accuracy across species and growth stages.</p>
<p>Lately, <xref ref-type="bibr" rid="B157">Yang and Cho (2021)</xref> and <xref ref-type="bibr" rid="B92">Ma et&#xa0;al. (2022)</xref> further advanced the application of ToF technology by combining it with other sensors for more precise phenotypic analysis. <xref ref-type="bibr" rid="B157">Yang and Cho (2021)</xref> integrated a Kinect v2 depth sensor with an RGB camera to achieve high-resolution 3D crop reconstruction. Their system and algorithms enabled the accurate reconstruction and automatic analysis of phenotypic indices for red pepper plants, demonstrating high accuracy with an error margin of approximately 5&#xa0;mm or less. Despite their success, they faced issues related to sensor calibration and the complexity of processing high-resolution data sets. Similarly, <xref ref-type="bibr" rid="B92">Ma et&#xa0;al. (2022)</xref> proposed a method for the automatic extraction of phenotypic traits from soybean canopies using 3D point cloud data acquired with a Kinect sensor. They developed a process for segmenting individual plants and calculating traits, such as plant height and leaf area index, and reported a high correlation between the estimated and manually measured values. However, they encountered difficulties in dealing with plant movement and variations in plant structure, which can affect the accuracy of trait measurements.</p>
<p>Likewise, several studies have explored the application of ToF cameras for plant phenotyping under FCP conditions, demonstrating both the utility and challenges of this technology in dynamic outdoor environments. <xref ref-type="bibr" rid="B69">Klose et&#xa0;al. (2009)</xref> and <xref ref-type="bibr" rid="B97">Moller et&#xa0;al. (2009)</xref> investigated the usability of 3D ToF cameras for automatic plant phenotyping and plant height measurements in field trials, respectively. <xref ref-type="bibr" rid="B69">Klose et&#xa0;al. (2009)</xref> focused on evaluating the performance of ToF cameras under varying outdoor conditions, such as direct sunlight, speed, humidity, and dust. They analyzed the color dependence, noise level, and depth resolution of cameras to determine their suitability for phenotyping applications. <xref ref-type="bibr" rid="B97">Moller et&#xa0;al. (2009)</xref> applied ToF cameras to measure the height of triticale in field trials. Their system utilized modulated light sources to calculate the distances for each pixel, enabling measurements while driving through test plots. The system achieved height estimation accuracy with a mean error of approximately 10%&#x2013;14% relative to manual measurements, although the performance varied with growth stage and environmental conditions. Despite these good results, challenges included managing environmental influences and ensuring the accuracy of height measurements during different growth stages.</p>
<p><xref ref-type="bibr" rid="B122">Ruckelshausen et&#xa0;al. (2009)</xref> and <xref ref-type="bibr" rid="B13">Busemeyer et&#xa0;al. (2010)</xref> extended the use of ToF technology to more sophisticated phenotyping platforms. <xref ref-type="bibr" rid="B122">Ruckelshausen et&#xa0;al. (2009)</xref> developed BoniRob, an autonomous field robot with multi-sensor systems, including ToF cameras, for individual plant phenotyping. This robot utilizes probabilistic robotics for navigation and multi-sensor fusion for accurate phenotypic measurements, emphasizing the importance of robustness and flexibility in field applications. <xref ref-type="bibr" rid="B14">Busemeyer et&#xa0;al. (2013)</xref> further enhanced BreedVision, a multi-sensor system integrated into a tractor for phenotyping high-density crop field plots, which they first developed (<xref ref-type="bibr" rid="B13">Busemeyer et&#xa0;al., 2010</xref>). This platform combines ToF cameras with other optical sensors to obtain comprehensive spectral and morphological data. They highlighted the importance of repeatability and robustness in sensor measurements, addressing challenges such as sensor calibration and data quality evaluation under field conditions.</p>
<p><xref ref-type="bibr" rid="B77">Li and Tang (2017)</xref> proposed a low-cost 3D plant reconstruction system using a 2D camera and a 3D ToF camera. They focused on developing algorithms for the precise alignment of multiple 3D views, enabling accurate 3D reconstruction and morphological trait characterization of corn seedlings. Their system demonstrated promising accuracy and speed, although challenges included maintaining alignment precision and handling complex plant structures. This study underscores the potential of affordable and high-performance phenotyping systems to enhance high-throughput phenotyping in indoor and outdoor settings.</p>
<p>Owing to the rugged nature of the field, many researchers have adopted field-based phenotyping robots because of their significant potential in large-scale agricultural applications and have focused on developing economically viable robotic platforms. For instance, <xref ref-type="bibr" rid="B160">Young et&#xa0;al. (2019)</xref> developed a low-cost robot for energy sorghum phenotyping, achieving plant height measurement accuracy with R&#xb2; = 0.90&#x2013;0.99 and RMSE of 5 cm&#x2013;8 cm, and stem diameter accuracy with R&#xb2; = 0.85&#x2013;0.92, demonstrating accurate plant height and stem width measurements over large areas. This system can be adapted for maize and other row crops, providing high spatial and temporal resolution data. <xref ref-type="bibr" rid="B31">Fan et&#xa0;al. (2022)</xref> presented a similar high-throughput phenotyping robot equipped with RGB-D cameras, achieving effective stem diameter measurements in challenging conditions of maize crop rows. <xref ref-type="bibr" rid="B135">Song et&#xa0;al. (2023)</xref> proposed a dynamic 3D data acquisition method using a consumer-grade RGB-D camera on a movable platform. This method efficiently collected RGB and depth images of crop canopies and achieved plant height estimation with R&#xb2; = 0.94&#x2013;0.99 and leaf area index correlation of R&#xb2; = 0.90&#x2013;0.96 across different maize growth stages. The system proved effective under various conditions, including different times of day and moving speeds, thus demonstrating its suitability for outdoor crop phenotyping.</p>
<p>Moreover, other studies have focused on optimizing methodologies and algorithms to fine-tune ToF applications in outdoor environments. This includes studies on poplar seedlings and maize plants that have demonstrated the capabilities of 3D ToF and RGB-D cameras in field phenotyping. For example, <xref ref-type="bibr" rid="B48">Hu et&#xa0;al. (2018)</xref> developed a method for measuring the leaf geometric characteristics of poplar seedlings using 3D visualization, demonstrating accurate measurements of leaf width, length, area, and inclination angle. <xref ref-type="bibr" rid="B145">V&#xe1;zquez-Arellano et&#xa0;al. (2018)</xref> focused on maize plants and utilized 3D reconstruction methods with ToF cameras to produce detailed point clouds and successfully validated seedling positions with high accuracy. Similarly, <xref ref-type="bibr" rid="B8">Bao et&#xa0;al. (2019b)</xref> created an automated system for characterizing maize architectural traits, achieving satisfactory accuracies for plant height, leaf angle, and plant orientation, proving the robustness of the system despite occlusions caused by leaves.</p>
<p>Furthermore, environmental sensor data fusion with non-rigid plant reconstruction models has been proposed to allow for the quick visualization of the environmental conditions in which plants grow. According to <xref ref-type="bibr" rid="B124">Sampaio et&#xa0;al. (2021)</xref>, fusion was performed through the colorization of the model regions, consistent with the sensor values at the heights where they were installed; in their proposed system, three height levels were selected. They experimented with three environmental sensors: temperature, humidity, and luminosity sensors. Their approach allows for accurate structural measurements and environmental mapping, enhancing crop efficiency and health evaluation.</p>
<p>In summary, ToF cameras present a distinctive performance profile characterized by high temporal resolution but moderate spatial accuracy in both environments. Under CCP conditions, ToF sensors achieve millimeter-level precision (&lt;5&#xa0;mm error; <xref ref-type="bibr" rid="B157">Yang and Cho, 2021</xref>) and enable real-time depth acquisition suitable for the dynamic monitoring of plant responses. Integration with RGB cameras facilitates simultaneous structural and color-based analyses, supporting applications from leaf segmentation to canopy LAI estimation (R&#xb2; = 0.94; <xref ref-type="bibr" rid="B92">Ma et&#xa0;al., 2022</xref>). However, ambient light interference, multipath reflections, and relatively low spatial resolution compared to laser triangulation limit the suitability of this technology for fine-scale organ measurements. In FCP settings, ToF cameras face additional challenges from sunlight saturation, which degrades depth accuracy in outdoor conditions, and from the limited operational range of consumer-grade sensors (<xref ref-type="bibr" rid="B61">Kazmi et&#xa0;al., 2014</xref>). Despite these constraints, the technology has found successful field applications through integration with robotic platforms (e.g., BoniRob, BreedVision), where real-time acquisition speed compensates for reduced precision, achieving a 10%&#x2013;14% mean error for canopy traits (<xref ref-type="bibr" rid="B77">Li and Tang, 2017</xref>; <xref ref-type="bibr" rid="B160">Young et&#xa0;al., 2019</xref>). The consistent pattern across studies indicates that ToF cameras are optimally positioned for applications prioritizing temporal frequency over spatial precision, time-series growth monitoring, dynamic response tracking, and real-time robotic guidance, rather than for high-accuracy static phenotyping.</p>
</sec>
<sec id="s2_4">
<label>2.4</label>
<title>Terrestrial Laser Scanning</title>
<p>Terrestrial Laser Scanning (TLS), also referred to as terrestrial light detection and ranging (LiDAR) or topographic LiDAR, is a remote sensing technology that captures precise three-dimensional (3D) information about objects and environments. Similar to 2D ToF cameras, TLS measures the time taken for emitted laser pulses to return after hitting a target. This time measurement calculates the distance to the target, enabling the creation of detailed 3D point clouds that accurately represent the scanned area. TLS devices consist of a laser emitter, a receiver, and, in most cases, a rotating mechanism to cover a large field of view (<xref ref-type="fig" rid="f3"><bold>Figure&#xa0;3</bold></xref>). The laser emits rapid light pulses, and the time taken for each pulse to return to the receiver after reflecting off an object is recorded. These data are used to compute the distance, generating a 3D point cloud in which each point has specific coordinates (x, y, z). This process is similar to that of ToF cameras; however, TLS systems typically offer higher precision and range. This technology is primarily employed to rapidly acquire 3D information across a wide range of topographic and industrial objects. This enables the precise modeling and documentation of diverse subjects, including cultural heritage sites, bridges, plants, vehicles, coastal cliffs, highways, and traffic collision damage (<xref ref-type="bibr" rid="B74">Lemmens and Lemmens, 2011</xref>).</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>Principles of operation of a terrestrial laser scanner: Equipped with a multi-channel laser emitter for broader line-of-sight coverage and a rotating mechanism to scan a wider field of view.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1731852-g003.tif">
<alt-text content-type="machine-generated">Drone illustration with a laser emitter/receiver projecting multiple laser channels toward the ground, where a plant is situated. A three-dimensional axis (x, y, z) is shown near the plant.</alt-text>
</graphic></fig>
<p>Notably, TLS sensors are implemented using various technologies, which can significantly influence their applicability under CCP and FCP conditions. Therefore, understanding the underlying concepts and classifications of this technology is essential. The first level of categorization was drawn from <xref ref-type="bibr" rid="B144">Van Genechten (2008)</xref>. Their tutorial divided laser scanner technology into two main categories: static and dynamic. Static laser scanning involves keeping the scanner in a fixed position during data acquisition, offering high precision and a relatively high point density. While all static laser scanning can be considered terrestrial laser scanning, not all terrestrial laser scanning falls under the static category.</p>
<p>In contrast, dynamic laser scanning involves mounting the scanner on a mobile platform. This approach requires additional positioning systems, such as Inertial Navigation Systems (INS) and Global Positioning Systems (GPS), making the setup more complex and expensive. Dynamic laser scanning includes scanning from an airplane, a moving car, or an unmanned aerial vehicle (UAV). Understanding these distinctions is crucial for appreciating the versatility and applications of TLS in various environments. Furthermore, <xref ref-type="bibr" rid="B74">Lemmens and Lemmens (2011)</xref> considered the measurement range of laser scanners to be one of the most important features of a TLS instrument as it significantly influences the types of applications for which they are suitable. The categorization is as follows: short-range laser scanners with a measurement range of up to 25&#xa0;m, medium-range laser scanners that can measure distances of up to 250&#xa0;m, and long-range laser scanners capable of measuring distances greater than 250&#xa0;m. This classification further helps determine the appropriate scanner for specific tasks and environments based on the required measurement range.</p>
<p>Additionally, laser scanners can be categorized based on the underlying technology, which is crucial for determining suitable deployment environments (<xref ref-type="bibr" rid="B18">Colombo and Marana, 2010</xref>). These categories include: 1) pulse measurements, also known in TLS as 3D time-of-flight, where pulses are emitted, and their travel time to and from the object is measured; 2) phase shift, where waves are modulated in width or frequency, with width modulation being sensitive to sharp discontinuities in the shape or reflectance of the object, and frequency modulation providing reliable measurements even when the return energy is low; 3) optical triangulation, used for short-range applications and small objects; and 4) interferometry, which offers very high precision and is typically used in indoor industrial metrology. Phase-shift and pulse measurements are commonly utilized in TLS systems for outdoor applications. Understanding these categories and operational principles of laser scanning technologies sets the stage for exploring their practical applications in plant phenotyping.</p>
<p>In addition to classification by measurement principle, LiDAR systems for plant phenotyping can be categorized by deployment platform, each offering distinct trade-offs between spatial resolution, coverage, and operational complexity (<xref ref-type="bibr" rid="B168">Zhu et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B57">Jin et&#xa0;al., 2021</xref>). Terrestrial LiDAR (TLS), operated from fixed tripod positions or ground-based mobile platforms, achieves the highest point densities (typically 100 points/m&#xb2;&#x2013;10,000 points/m&#xb2; at close range) and is optimal for the detailed structural characterization of individual plants or small plots. However, fixed TLS requires multiple scan positions to minimize occlusions, limiting the throughput for large-scale phenotyping.</p>
<p>Mobile terrestrial LiDAR (MLS), including vehicle-mounted, backpack, and handheld configurations, addresses the throughput limitations of static TLS while maintaining high point densities (typically 50 points/m&#xb2;&#x2013;500 points/m&#xb2;). Backpack LiDAR systems, such as those described by <xref ref-type="bibr" rid="B168">Zhu et&#xa0;al. (2021)</xref>, can phenotype hundreds of field plots per day, achieving a height estimation RMSE of 5&#x2013;6 cm with sufficient point density for plot-level trait extraction. The integration of simultaneous localization and mapping (SLAM) algorithms enables continuous data acquisition without the need for external positioning references in certain systems.</p>
<p>Airborne LiDAR (ALS), deployed from manned aircraft or UAVs, provides the largest spatial coverage but at reduced point densities (typically 1 point/m&#xb2;&#x2013;50 points/m&#xb2; for UAV-LiDAR and &lt;1 point/m&#xb2; for aircraft-mounted systems). UAV-LiDAR has emerged as a practical compromise, offering field-scale coverage with point densities that are sufficient for canopy-level trait extraction (<xref ref-type="bibr" rid="B44">Harkel et&#xa0;al., 2020</xref>). However, UAV payload constraints limit the sensor quality compared to terrestrial systems, and the regulatory requirements for larger UAVs add operational complexity.</xref></p>
<p>Furthermore, the minimum point density required varies substantially according to the target trait. Canopy-level traits (height, cover, and volume) can be reliably extracted from point clouds with densities as low as 10 points/m&#xb2;&#x2013;50 points/m&#xb2; (<xref ref-type="bibr" rid="B93">Madec et&#xa0;al., 2017</xref>), whereas organ-level traits (leaf dimensions and stem diameter) typically require densities exceeding 500 points/m&#xb2; (<xref ref-type="bibr" rid="B110">Paulus, 2019</xref>). This relationship between point density and achievable trait resolution explains the continued role of high-density TLS in detailed phenotyping, despite the throughput advantages of airborne systems. For breeding applications focused on canopy-level selection traits, UAV-LiDAR provides adequate resolution. For physiological studies requiring organ-level measurements, terrestrial systems remain essential.</p>
<p>Until the late 2000s, the use of TLS in plant-related studies was limited to monitoring and modeling large forest tree species (<xref ref-type="bibr" rid="B41">Gorte and Pfeifer, 2004</xref>; <xref ref-type="bibr" rid="B47">Hosoi and Omasa, 2009</xref>; <xref ref-type="bibr" rid="B117">Preuksakarn et&#xa0;al., 2010</xref>). However, the focus is gradually shifting towards its application in crop monitoring and modeling under both CCP and FCP conditions. TLS has proven to be a powerful tool for plant phenotyping under FCP conditions, providing high-resolution and accurate data on plant structures and spatial distribution. The application of TLS in open-field environments presents different challenges and opportunities. Outdoor conditions introduce variability in lighting, weather, and plant interactions, affecting the quality of the collected data. The robustness of TLS technology allows for comprehensive assessments of large-field plant growth dynamics, health, and spatial distribution, providing insights that are crucial for improving crop management and breeding programs. Several studies have demonstrated the practical use of TLS for FCP conditions to perform growth monitoring (<xref ref-type="bibr" rid="B23">Dhami et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B34">Friedli et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B161">Yuan et&#xa0;al., 2018</xref>), health monitoring (<xref ref-type="bibr" rid="B136">Su et&#xa0;al., 2019</xref>), biomass estimation (<xref ref-type="bibr" rid="B20">Deery et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B76">Li et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B106">Pan et&#xa0;al., 2022</xref>) and yield prediction (<xref ref-type="bibr" rid="B94">Malambo et&#xa0;al., 2019</xref>). 
Beyond these primary applications, LiDAR-derived traits have been expanded to include canopy structural complexity indices (<xref ref-type="bibr" rid="B168">Zhu et&#xa0;al., 2021</xref>), leaf area index estimation through gap fraction analysis (<xref ref-type="bibr" rid="B47">Hosoi and Omasa, 2009</xref>), lodging severity quantification (<xref ref-type="bibr" rid="B94">Malambo et&#xa0;al., 2019</xref>), and temporal growth rate characterization through multi-date acquisitions (<xref ref-type="bibr" rid="B57">Jin et&#xa0;al., 2021</xref>). The ability to derive multiple traits from a single acquisition, including height, volume, surface area, and structural heterogeneity, positions LiDAR as a particularly efficient sensing modality for breeding programs that require comprehensive phenotypic characterization.</p>
<p>Despite its advantages for field phenotyping, LiDAR technology faces several challenges specific to FCP environments that can limit the data quality and trait extraction accuracy. Ground-canopy separation presents a fundamental difficulty in dense crop stands, where laser pulses may fail to penetrate to ground level, compromising the accuracy of height calculations that depend on digital terrain models (<xref ref-type="bibr" rid="B168">Zhu et&#xa0;al., 2021</xref>). Multi-return LiDAR systems partially address this issue by distinguishing the first and last returns; however, their performance is degraded in crops with overlapping canopy layers.</p>
<p>Wind-induced motion during scanning introduces noise and registration errors, which are particularly problematic for mobile platforms, where the scan duration may span several seconds per plot. <xref ref-type="bibr" rid="B34">Friedli et&#xa0;al. (2016)</xref> documented increased height estimation variance under windy conditions, recommending data acquisition during calm periods when feasible. Atmospheric conditions, including dust, fog, and precipitation, can attenuate laser returns and introduce spurious points, although active sensing is generally more robust to these factors than passive imaging techniques.</p>
<p>Canopy penetration varies with plant architecture and growth stage, affecting the structural characterization completeness. Erectophile canopies (erect leaves) typically permit greater laser penetration than planophile architectures, creating systematic differences in point cloud completeness among genotypes (<xref ref-type="bibr" rid="B57">Jin et&#xa0;al., 2021</xref>). The interaction between plant architecture and sensing geometry represents an often-overlooked source of measurement bias in comparative phenotyping studies. Finally, the computational burden of processing high-density LiDAR point clouds remains substantial, with datasets for large breeding trials potentially reaching terabytes and requiring specialized processing pipelines (<xref ref-type="bibr" rid="B43">Harandi et&#xa0;al., 2023</xref>).</p>
<p>Quantitative validation of TLS under field conditions demonstrated robust performance for canopy-level phenotyping. <xref ref-type="bibr" rid="B161">Yuan et&#xa0;al. (2018)</xref> compared ground-based LiDAR with UAV photogrammetry for wheat height estimation, finding LiDAR achieved superior accuracy (RMSE&#xa0;=&#xa0;0.05 m, R&#xb2; = 0.97) compared to UAV-SfM (RMSE&#xa0;=&#xa0;0.09 m, R&#xb2; = 0.91). <xref ref-type="bibr" rid="B168">Zhu et&#xa0;al. (2021)</xref> demonstrated that backpack-mounted LiDAR systems could phenotype wheat plots with a height estimation RMSE of 5 cm&#x2013;6 cm while maintaining a throughput sufficient for large breeding trials (&gt;500 plots per day). For biomass estimation, <xref ref-type="bibr" rid="B20">Deery et&#xa0;al. (2020)</xref> reported an R&#xb2; of 0.86 between TLS-derived canopy volume and destructive biomass measurements in wheat plants. These results indicate that mobile TLS platforms can achieve field phenotyping accuracy approaching that of fixed-gantry systems while offering substantially greater flexibility and coverage.</p>
<p>In contrast, TLS has been used in CCP to enhance the accuracy and efficiency of phenotyping processes. For instance, <xref ref-type="bibr" rid="B107">Panjvani et&#xa0;al. (2019)</xref> developed a low-cost LiDAR-based 3D scanning system to estimate key leaf traits, such as length, width, and area. The LiDARPheno system used a LiDAR sensor interfaced with Arduino Uno and Raspberry Pi to create a cost-effective and user-friendly setup. This study demonstrated the potential of LiDAR to provide accurate phenotypic measurements, emphasizing its applicability in indoor settings, where traditional methods might fall short because of the complex structure of plants. Similarly, <xref ref-type="bibr" rid="B108">Patel et&#xa0;al. (2023)</xref> developed a deep learning-based approach to enhance individual plant organ segmentation and phenotyping under controlled scanning conditions using LiDAR point clouds. Furthermore, <xref ref-type="bibr" rid="B149">Wang et&#xa0;al. (2018)</xref> performed a comparative study of TLS and MVS reconstruction and concluded that TLS provided satisfactory point clouds for medium- and high-maize plants with acceptable efficiency. However, the results were not adequate for small maize plants. A more recent comprehensive review of TLS applications in crop management for precision agriculture can be found in <xref ref-type="bibr" rid="B32">Farhan et&#xa0;al. (2024)</xref>.</p>
<p>In summary, TLS occupies a unique position among 3D phenotyping technologies, with its primary strengths more naturally aligned with FCP than with CCP applications. Under controlled conditions, TLS provides sub-millimeter to millimeter accuracy and enables detailed architectural measurements (R&#xb2; &gt;0.90 for organ traits; <xref ref-type="bibr" rid="B149">Wang et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B141">Thapa et&#xa0;al., 2018</xref>), but its advantages over simpler, lower-cost alternatives (laser triangulation, structured light) are limited when environmental control eliminates the need for long-range capability and lighting independence. In contrast, TLS demonstrates robust performance under field conditions, where its active illumination, independence from ambient light, and extended measurement range (up to 250+ m for long-range systems) provide decisive advantages. Field studies have consistently reported centimeter-level accuracy (RMSE&#xa0;=&#xa0;5&#x2013;6 cm for plant height; <xref ref-type="bibr" rid="B161">Yuan et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B23">Dhami et&#xa0;al., 2020</xref>) and strong correlations for biomass estimation (R&#xb2; = 0.86; <xref ref-type="bibr" rid="B20">Deery et&#xa0;al., 2020</xref>). Recent innovations in mobile and backpack-mounted LiDAR systems (<xref ref-type="bibr" rid="B168">Zhu et&#xa0;al., 2021</xref>) have substantially improved field throughput while maintaining accuracy, thereby enabling large-scale phenotyping across hundreds of plots. Key limitations of FCP include challenges with ground-canopy separation in dense vegetation, wind-induced noise during scanning, and computational demands for processing high-density point clouds. 
The synthesis across studies reveals that TLS is optimally deployed for field-scale structural phenotyping, where environmental robustness and measurement range outweigh the cost and complexity considerations that favor simpler technologies in controlled environments.</p>
</sec>
<sec id="s2_5">
<label>2.5</label>
<title>Structured light approaches</title>
<p>Structured light (SL) scanning is an advanced optical technique widely used in various fields for precise 3D surface measurements and imaging. This technology operates by projecting a series of light patterns, often in the form of grids or stripes, onto objects (<xref ref-type="bibr" rid="B123">Salvi et&#xa0;al., 2010</xref>). The deformation of these patterns when they interact with the object&#x2019;s surface is captured by a single camera or a pair of cameras (<xref ref-type="fig" rid="f4"><bold>Figure&#xa0;4</bold></xref>). By analyzing the captured images, complex algorithms can reconstruct the 3D geometry of an object with high accuracy and resolution.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Structured light scanning mechanism: A projector emits a predetermined pattern, while a camera (or a pair of cameras) captures light deformation on the target object.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1731852-g004.tif">
<alt-text content-type="machine-generated">Illustration showing pattern projection and deformation analysis on a plant. An orange striped pattern is projected onto the plant, causing deformation. The recorded pattern is shown alongside the projected pattern. An axis labeled x, y, and z indicates orientation.</alt-text>
</graphic></fig>
<p>Similar to laser triangulation and stereo vision discussed previously, the core principle behind SL scanning involves replacing one of the cameras in stereo vision with a projector and applying the triangulation method, where the positions of the light projector and cameras are fixed and known (<xref ref-type="bibr" rid="B123">Salvi et&#xa0;al., 2010</xref>). When a light pattern is projected onto an object, each point on the surface creates a unique deformation in the pattern, allowing the system to calculate the precise 3D coordinates of those points. This method offers several advantages, including high precision capable of capturing fine details (<xref ref-type="bibr" rid="B148">Wan et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B167">Zhou et&#xa0;al., 2021</xref>), rapid data acquisition suitable for dynamic environments (<xref ref-type="bibr" rid="B150">Wang and Zhu, 2024</xref>; <xref ref-type="bibr" rid="B167">Zhou et&#xa0;al., 2021</xref>), and non-contact measurement that ensures that delicate objects are not disturbed or damaged. Structured light scanning has been employed in diverse industries, such as manufacturing, healthcare, cultural heritage preservation, and entertainment. In manufacturing, it ensures quality control by providing detailed inspections of parts and assemblies (<xref ref-type="bibr" rid="B54">Javaid et&#xa0;al., 2021</xref>). In healthcare, it aids in creating accurate 3D models for prosthetics and surgical planning (<xref ref-type="bibr" rid="B104">Olesen et&#xa0;al., 2011</xref>). The cultural heritage sector uses it for the preservation and digital archiving of artifacts (<xref ref-type="bibr" rid="B59">Kantaros et&#xa0;al., 2023</xref>), whereas in entertainment, it enables realistic 3D modeling for visual effects and animations (<xref ref-type="bibr" rid="B45">Hieda, 2015</xref>). 
Readers may refer to <xref ref-type="bibr" rid="B164">Zhang (2018)</xref> for a more systematic review of 3D shape measurements using SL methods.</p>
<p>Over the past few decades, the agricultural sector has recognized the potential of SL scanning, particularly in 3D morphological plant phenotyping. The adoption of SL scanning in plant phenotyping has numerous benefits. It provides a detailed morphological analysis by creating high-resolution 3D models of plants, capturing intricate details of plant shape, size, and structure (<xref ref-type="bibr" rid="B101">Nguyen et&#xa0;al., 2015</xref>). Similar to other approaches, it enables dynamic growth monitoring, allowing continuous observation of plant development over time, which is crucial for studying dynamic processes. This technology can also facilitate stress response analysis by assessing the physical changes in plants under various stress conditions, such as drought, disease, or nutrient deficiency (<xref ref-type="bibr" rid="B98">Nam et&#xa0;al., 2014</xref>). Furthermore, it supports high-throughput analysis and can quickly examine large numbers of plants, thereby enhancing the efficiency of phenotyping processes.</p>
<p>SL scanning systems are commonly integrated into CCP environments, such as greenhouses and growth chambers, for practical implementation. For example, <xref ref-type="bibr" rid="B101">Nguyen et&#xa0;al. (2015)</xref> described a novel 3D indoor reconstruction system for plants that utilizes multiple high-resolution digital cameras, structured illumination, and computer vision techniques to enable non-destructive phenotyping of various crop plants, including cabbage, cucumber, and tomato. <xref ref-type="bibr" rid="B98">Nam et&#xa0;al. (2014)</xref> also demonstrated the potential of using SL to detect changes in growth responses to abiotic stress based on 3D leaf area analysis from the reconstructed point cloud.</p>
<p>In the case of FCP applications, there are limited studies reported in the literature; nevertheless, studies such as <xref ref-type="bibr" rid="B120">Rosell-Polo et&#xa0;al. (2015)</xref> provide a very detailed analysis of the capabilities of SL in precision agriculture. According to the authors, field tests demonstrated that these SL sensors effectively captured RGB-D point clouds for detailed 3D models, which can support site-specific phenotyping applications, including weed control. <xref ref-type="bibr" rid="B120">Rosell-Polo et&#xa0;al. (2015)</xref> reported that under controlled outdoor lighting (dawn/dusk conditions), structured light systems achieved canopy volume estimation with R&#xb2; = 0.99, although the accuracy degraded substantially (&gt;50% error increase) under direct sunlight conditions. Additionally, although sensor performance is limited under high ambient light, their affordability, high frame rate, and flexibility render them valuable for precision agriculture and outdoor conditions during dawn, dusk, or nighttime. The scarcity of quantitative FCP validation studies for structured light systems represents a notable research gap, as most published accuracy assessments are derived from controlled environments.</p>
<p>In summary, structured light technology exhibits the most pronounced performance disparity between the CCP and FCP environments among all the reviewed techniques. Under controlled conditions, SL systems achieve millimeter-level accuracy (&lt;13&#xa0;mm error; <xref ref-type="bibr" rid="B101">Nguyen et&#xa0;al., 2015</xref>) and high correlations for leaf area and stress response measurements (R&#xb2; &gt;0.9; <xref ref-type="bibr" rid="B98">Nam et&#xa0;al., 2014</xref>), benefiting from the ability to control ambient lighting that would otherwise interfere with the projected patterns. This technology excels in high-resolution surface reconstruction for detailed morphological analysis and supports applications from leaf area quantification to drought stress detection. However, the fundamental dependence on controlled lighting represents a critical limitation for field deployment, as high ambient light severely degrades pattern detection, effectively restricting outdoor operations to dawn, dusk, or nighttime periods (<xref ref-type="bibr" rid="B120">Rosell-Polo et&#xa0;al., 2015</xref>). This operational constraint, combined with calibration sensitivity and the requirement for a fixed projector-camera geometry, has limited FCP adoption despite the affordability of the technology relative to laser-based alternatives. The consistent finding across studies is that structured light offers an attractive cost-accuracy balance for indoor phenotyping but cannot currently serve as a general-purpose field solution. Future developments in high-power projection and ambient light rejection may expand the operational envelope; however, at present, the technology remains predominantly CCP-oriented, with only niche FCP applications under controlled lighting conditions.</p>
</sec>
<sec id="s2_6">
<label>2.6</label>
<title>3D light field cameras</title>
<p>Light-field (LF) cameras represent an innovative leap in imaging technology, offering capabilities that extend far beyond those of traditional photography. Unlike conventional cameras, which capture a two-dimensional representation of a scene, LF cameras record the amount of light that travels in every direction through every point in space (<xref ref-type="bibr" rid="B56">Jeon et&#xa0;al., 2015</xref>). This is achieved by capturing the light field, a concept rooted in physics that describes the intensity and direction of light rays in a given environment (<xref ref-type="bibr" rid="B153">Wu et&#xa0;al., 2017</xref>).</p>
<p>The core of LF technology lies in its ability to capture both spatial and angular information of light rays. This is typically accomplished using an array of micro-lenses placed in front of the camera&#x2019;s main lens (<xref ref-type="bibr" rid="B153">Wu et&#xa0;al., 2017</xref>). Each microlens captures light from different angles, allowing the camera to reconstruct the entire LF (<xref ref-type="fig" rid="f5"><bold>Figure&#xa0;5</bold></xref>). This process results in a wealth of data that can be manipulated post-capture, enabling features such as refocusing, changing the depth of field, and creating 3D images from a single exposure (<xref ref-type="bibr" rid="B56">Jeon et&#xa0;al., 2015</xref>). In other words, the most significant advantage of LF cameras is their ability to refocus images after they have been captured. This feature is possible because the camera records light from multiple perspectives, allowing users to select different focal points during post-processing. Additionally, the depth information captured by LF cameras enables the creation of stereoscopic images, making it possible to render scenes in 3D and extract depth maps for various applications.</p>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Operational principle of a light field camera: Features an array of micro-lenses positioned in front of the camera&#x2019;s main lens.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1731852-g005.tif">
<alt-text content-type="machine-generated">Illustration depicting a camera system with labeled components: image sensor, micro lenses, and main lens. Orange light rays from a target object converge through the lenses, indicating light path. The target object is a plant, and a three-dimensional coordinate system with axes labeled x, y, and z is shown at its base.</alt-text>
</graphic></fig>
<p>LF technology has primarily been applied in various fields, including virtual reality, computational photography, and industrial inspection. Its ability to capture and manipulate 3D information makes it particularly valuable in areas where precise spatial data are essential. For instance, in cinematography, LF cameras allow filmmakers to create immersive experiences by capturing scenes that can be navigated and refocused during post-production, offering unprecedented creative flexibility (<xref ref-type="bibr" rid="B11">Broxton et&#xa0;al., 2020</xref>).</p>
<p>Building on the technological foundation of light-field cameras, their application in agriculture, particularly in plant phenotyping, represents a promising frontier. In plant phenotyping, LF cameras can capture detailed 3D models of plants, enabling researchers to analyze various target traits and their growth patterns. The depth information provided by LF cameras is beneficial for assessing traits that are difficult to measure with traditional 2D imaging, such as leaf angle distribution and canopy structure (<xref ref-type="bibr" rid="B115">Polder and Hofstee, 2014</xref>). Moreover, the ability to refocus images allows for more accurate measurements of these traits, as researchers can adjust the focal plane to capture sharp images of specific plant parts (<xref ref-type="bibr" rid="B127">Schima et&#xa0;al., 2016</xref>).</p>
<p>According to <xref ref-type="bibr" rid="B115">Polder and Hofstee (2014)</xref>, one of the key advantages of using LF cameras for plant phenotyping is the potential for automated large-scale data collection. By integrating LF cameras with machine learning algorithms, systems can be developed that automatically analyze plant traits from captured images, significantly reducing the time and effort required for phenotyping. This capability is especially valuable in breeding programs, where large populations of plants need to be evaluated for desirable traits.</p>
<p>Furthermore, <xref ref-type="bibr" rid="B127">Schima et&#xa0;al. (2016)</xref> underscored the potential of LF cameras as a powerful tool for on-site crop monitoring. This study evaluated a light-field camera system capable of capturing plant growth dynamics and traits in a field environment. The immunity of this technology to ambient conditions, such as varying light levels and environmental changes, makes it an effective tool for long-term plant monitoring, offering reliable performance across different settings. This makes light-field cameras particularly useful for large-scale in-field applications where traditional imaging systems might struggle with environmental variability. The integration of light-field cameras into real-time crop growth monitoring systems improves the spatial accuracy of trait measurements. This enhances the ability to track changes in plant morphology over time, providing valuable data for breeding programs and in precision agriculture. This study also highlighted the cost-effectiveness of these cameras in large-scale agricultural research, making them practical solutions for automated data collection across diverse crop types and environments.</p>
<p>While LF cameras hold significant potential for plant phenotyping, especially in capturing 3D spatial data and enabling automated large-scale trait analysis, their adoption in this field has been limited by several technological and operational challenges. <xref ref-type="bibr" rid="B127">Schima et&#xa0;al. (2016)</xref> and <xref ref-type="bibr" rid="B115">Polder and Hofstee (2014)</xref> provided valuable insights into the current limitations of LF cameras, which likely explain their slower adoption in phenotyping practices.</p>
<p>A key limitation highlighted by <xref ref-type="bibr" rid="B127">Schima et&#xa0;al. (2016)</xref> is the limited depth resolution of LF cameras, particularly at long distances. For example, the Lytro LF exhibited accurate depth estimation only within a range of 10 cm&#x2013;50 cm, which is insufficient for many field-based phenotyping tasks that require larger distances between the camera and plants. This short range significantly restricts the utility of LF cameras for large-scale plant height estimation or other growth monitoring tasks in tall crops or large-field plots. While newer models, such as the Lytro Illum, have improved sensor sizes, the small stereoscopic base of early models, such as the Lytro LF, makes them unsuitable for accurate measurements over larger distances. Furthermore, <xref ref-type="bibr" rid="B127">Schima et&#xa0;al. (2016)</xref> also report that cost remains a barrier, as achieving higher pixel resolution and depth accuracy would require more advanced (and expensive) cameras with larger sensors and improved microlens arrays.</p>
<p><xref ref-type="bibr" rid="B115">Polder and Hofstee (2014)</xref> highlighted the additional technical challenges that arise when deploying LF cameras in greenhouse environments. One such issue is the complexity of calibration for accurate depth- and focus measurements. Over-saturation of pixels during calibration was found to disturb the proper calculation of depth and focus, which is a significant problem in environments where lighting is variable or difficult to control. Additionally, the fixed aperture setting of the camera (f/11) presented limitations in terms of image intensity control, particularly in combination with flash illumination, which is crucial for consistent daytime imaging in phenotyping environments. These constraints make it difficult to use LF cameras in practical phenotyping setups without significant modifications or expensive equipment.</p>
<p>Moreover, the limited field of view (FOV) of the LF camera is another significant drawback for phenotyping, as noted by <xref ref-type="bibr" rid="B115">Polder and Hofstee (2014)</xref>. In their experiment, the LF camera had a maximum FOV of 50&#xb0;, which was insufficient to capture entire plants, notably taller plants growing in greenhouse environments. This necessitates additional cameras or optical enhancements, such as mirrors, to achieve a wider FOV, further complicating the system design and increasing costs. This narrow FOV restricts the efficiency of image capture and data collection for large-scale phenotyping, rendering the technology less practical for high-throughput phenotyping.</p>
<p>The heavy computational requirements of LF cameras also present a significant challenge, as noted by <xref ref-type="bibr" rid="B127">Schima et&#xa0;al. (2016)</xref> and <xref ref-type="bibr" rid="B115">Polder and Hofstee (2014)</xref>. The large file sizes of LF images (approximately 40 MB per image in Polder&#x2019;s study) require substantial computing power to process depth and focus information. In robotic platforms, such as PhenoBot (<xref ref-type="bibr" rid="B118">Richardson et&#xa0;al., 2023</xref>), the power consumption of the computing systems required for LF processing was found to strain the battery resources of the robot, suggesting that a distributed setup (where images are captured on the robot and processed on a separate computer) is necessary to avoid battery depletion. This additional infrastructure increases the complexity and cost of the phenotyping system.</p>
<p>The quantitative validation of light-field cameras under field conditions remains extremely limited. <xref ref-type="bibr" rid="B127">Schima et&#xa0;al. (2016)</xref> reported plant height estimation with an average deviation error of 4.33 units (sensor-specific scale) under field conditions, representing substantially lower accuracy than achieved by MVS or TLS alternatives. The scarcity of quantitative FCP studies on light-field technology reflects both the relative immaturity of this technology for agricultural applications and the practical challenges of deploying these systems outside controlled environments. This gap in field validation represents a significant barrier to assessing the potential of this technology for routine phenotyping applications and should be addressed in future research.</p>
<p>In summary, light-field cameras represent an emerging technology with distinctive capabilities, particularly post-capture refocusing and single-exposure depth acquisition, but they have substantial limitations that have constrained their adoption in both CCP and FCP contexts. Under controlled conditions, LF cameras enable novel analytical approaches, including depth-based segmentation and variable focal plane analysis, for stem and leaf characterization (<xref ref-type="bibr" rid="B115">Polder and Hofstee, 2014</xref>). However, effective depth estimation is limited to short ranges (10 cm&#x2013;50 cm; <xref ref-type="bibr" rid="B127">Schima et&#xa0;al., 2016</xref>), the narrow field of view necessitates multiple captures or optical enhancements for whole-plant imaging, and substantial computational resources are required for processing (file sizes approximately 40 MB per image). These constraints have prevented LF technology from competing with established approaches for routine CCP phenotyping, despite its unique capabilities. In FCP settings, additional challenges emerge: the short effective range limits the applicability to close-range ground-based systems, and the computational demands strain the mobile platform&#x2019;s power budget (<xref ref-type="bibr" rid="B118">Richardson et&#xa0;al., 2023</xref>). Field validation has been limited, with reported accuracy (4.33 average deviation error for height; <xref ref-type="bibr" rid="B127">Schima et&#xa0;al., 2016</xref>) being below that achieved by more mature technologies. The synthesis across available studies suggests that light-field cameras currently occupy a specialized niche for applications requiring post-capture focal adjustment or depth-from-single-exposure capabilities but do not yet offer compelling advantages over MVS or ToF alternatives for general phenotyping tasks. 
Continued development of sensor resolution, depth range, and processing efficiency is required before LF technology can achieve broader adoption in plant phenotyping workflows.</p>
</sec>
</sec>
<sec id="s3">
<label>3</label>
<title>Sensor mounting and carrier platforms</title>
<p>Effective 3D precision crop management relies not only on the sensors themselves but also on their strategic deployment. The mounting and carrying of sensors is a key factor in ensuring accurate and reliable data capture across various environments. Whether in controlled laboratory settings or challenging outdoor fields, the choice and/or design of mounting mechanisms and carrier platforms determines the quality and consistency of phenotypic data. Furthermore, 3D precision sensors naturally perceive the world within a given scope (i.e., in terms of visibility and distance) and thus require either the sensor or the target object to be moved and/or rotated to capture distant and obscured sides or objects. This section explores the critical role of sensor mounting and carrier platforms. It offers insights into various options and their suitability across different agricultural contexts to optimize data collection and generate precise and actionable insights for crop management and improvement.</p>
<sec id="s3_1">
<label>3.1</label>
<title>Sensor mounting</title>
<p>In this study, sensor mounting refers to the method or mechanism by which a sensor is physically positioned and secured to capture the data. Depending on the configuration, sensor mounting mechanisms must provide the desired stability, accuracy, and flexibility in different environments, whether in controlled laboratory settings or challenging field conditions. The choice of mounting method, whether a tripod, articulated arm, gimbal, or handheld setup, affects not only the precision of the data but also the efficiency and scope of the phenotyping process. This subsection introduces various sensor mounting mechanisms (<xref ref-type="fig" rid="f6"><bold>Figure&#xa0;6</bold></xref>), examining their technical attributes and how they contribute to optimizing data capture for precise crop management and analysis.</p>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>Common sensor mounting configurations in 3D phenotyping: <bold>(A)</bold> gimbal, <bold>(B)</bold> tripod stand, <bold>(C)</bold> handheld holder, <bold>(D)</bold> robotic arm, and <bold>(E)</bold> fixed post.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1731852-g006.tif">
<alt-text content-type="machine-generated">Illustration of various camera mount mechanisms. (a) Features an articulated arm mount. (b) Depicts a tripod with adjustable legs. (c) Shows a handheld gimbal with rotation arrows. (d) Illustrates an industrial robotic arm. (e) Represents a solar-powered pole with a camera mounted above grass.</alt-text>
</graphic></fig>
<sec id="s3_1_1">
<label>3.1.1</label>
<title>Tripods and fixed posts</title>
<p>Tripods and fixed posts are among the most commonly used mounting mechanisms for sensor deployment, particularly in controlled environments such as laboratories or greenhouses. These mechanisms provide a stable and stationary sensor platform that ensures consistent positioning over time. Tripods are adjustable in height and angle, allowing flexibility in sensor placement, which is crucial for capturing data at different stages of plant growth or from various angles. For instance, <xref ref-type="bibr" rid="B1">Alenya et&#xa0;al. (2011)</xref> used a tripod-mounted time-of-flight (ToF) camera to monitor maize plant growth in a greenhouse environment. The tripod ensured that the camera remained perfectly still, allowing for precise and repeatable 3D scans at multiple growth stages. According to the authors, this setup was crucial for analyzing the volumetric changes and structural development of the plant, as even minor shifts in the camera position could introduce significant errors in the 3D reconstructions. Fixed posts, often more robust and permanently installed, offer unparalleled stability, making them ideal for long-term monitoring in field conditions where a terrestrial laser scanner (TLS) can be employed to monitor the growth and structure of trees or taller crops.</p>
<p><xref ref-type="bibr" rid="B9">B&#xe9;land et&#xa0;al. (2014)</xref> used a fixed post to mount a TLS device for the 3D scanning of forest canopy structures. This provides a comprehensive view of tree architecture, capturing detailed data on branch and leaf distributions, which is critical for studying light interception and biomass estimation in forestry research. Both methods are beneficial for high-precision measurements, where the sensor movement is complex and/or may introduce errors or inconsistencies in the data.</p>
</sec>
<sec id="s3_1_2">
<label>3.1.2</label>
<title>Articulated arm</title>
<p>Articulated arms are versatile mounting mechanisms that provide high sensor positioning and orientation flexibility. These arms can be adjusted autonomously in multiple directions and angles, allowing the sensors to be positioned precisely and moved dynamically from various perspectives during data collection without the need to reposition the plant. This is particularly advantageous when data need to be captured from multiple perspectives or when the sensor needs to be repositioned frequently without disturbing the plant or the surrounding environment (<xref ref-type="bibr" rid="B4">Atefi et&#xa0;al., 2021</xref>). Articulated arms are often used in laboratory settings for detailed phenotyping tasks, such as scanning specific plant structures or capturing images from various angles, and can be integrated with automated systems for high-throughput data acquisition. <xref ref-type="bibr" rid="B113">Paulus and Jens (2015)</xref> found that a robotic arm is crucial for enhancing the precision and flexibility of the laser scanning process used for phenotyping cereal plants. This system uses a robotic arm equipped with a laser scanner to capture 3D models of plant architecture, allowing for the high-resolution, non-invasive analysis of plant traits, such as canopy structure and stem alignment. The robotic arm provides precise control over the orientation and position of the sensor, facilitating the systematic scanning of plant surfaces and ensuring comprehensive coverage from multiple angles. This setup improves data consistency and accuracy, particularly in controlled environments where detailed plant morphology and growth patterns are monitored over time. The articulated arm&#x2019;s ability to maintain consistent sensor positioning while allowing for dynamic adjustments makes it an invaluable tool for detailed morphological studies.</p>
<p>Moreover, recent advancements in quadruped robotics, particularly platforms such as Boston Dynamics&#x2019; Spot, have opened new possibilities for deploying articulated arms in field conditions (<xref ref-type="bibr" rid="B86">Lopes et&#xa0;al., 2023</xref>). The mobility and adaptability of quadruped robots make them ideal for navigating complex terrains, where traditional wheeled or stationary platforms are less effective. Equipping quadrupeds, such as Spot, with articulated sensor arms makes it possible to maneuver sensors around plants in the field, capturing fine-scale morphological data that are typically only accessible in controlled environments.</p>
</sec>
<sec id="s3_1_3">
<label>3.1.3</label>
<title>Gimbal</title>
<p>Gimbals are advanced mounting mechanisms designed to stabilize sensors, even when the platform on which they are mounted is in motion (<xref ref-type="bibr" rid="B132">Singh et&#xa0;al., 2016</xref>). This technology is particularly beneficial in field conditions where drones or robotic platforms are used to carry sensors. The gimbal allows the sensor to maintain a steady orientation, compensating for movement or vibrations, which is critical for capturing clear and accurate data. Gimbals are commonly used in aerial phenotyping with drones, where maintaining a stable image or scan is essential despite the movement of the drone (<xref ref-type="bibr" rid="B38">Ga&#x161;parovi&#x107; and Jurjevi&#x107;, 2017</xref>). This ensures high-quality data capture, which is crucial for aerial imaging, mapping, and monitoring crop health across large fields. <xref ref-type="bibr" rid="B131">Shi et&#xa0;al. (2016)</xref> demonstrated using a gimbal-stabilized photogrammetry system mounted on a UAV for high-throughput phenotyping in agricultural research. The gimbal stabilization was critical in maintaining the orientation of the photogrammetric camera, ensuring that the captured images were free from motion blur and other distortions. This precision is essential for accurately generating digital surface models (DSMs). This study focused on measuring plant height using DSMs generated from UAV-captured images. The gimbal-stabilized setup ensured that the height measurements were accurate and reliable, providing valuable data for precision agriculture practices. This approach enables efficient large-scale data collection across uneven terrains, significantly improving the process of high-throughput phenotyping compared with traditional ground-based methods.</p>
<p>Similarly, a recent review by <xref ref-type="bibr" rid="B140">Tanaka et&#xa0;al. (2024)</xref> highlighted the advantages of using UAV-mounted sensors with gimbals for crop phenotyping. The authors emphasized that gimbal-stabilized sensors provide high-resolution images and reduce motion artifacts, thereby improving the accuracy of phenotypic data.</p>
</sec>
<sec id="s3_1_4">
<label>3.1.4</label>
<title>Handheld</title>
<p>Handheld mounting mechanisms offer the most flexibility but require manual operation, making them suitable for specific and targeted data collection tasks (<xref ref-type="bibr" rid="B110">Paulus, 2019</xref>). These setups are often used in field conditions where mobility and the ability to capture data from various locations quickly are essential (<xref ref-type="bibr" rid="B168">Zhu et&#xa0;al., 2021</xref>). Handheld sensors are beneficial for on-the-spot assessments, such as measuring plant height, leaf area, and other morphological traits. Although they lack the stability of fixed mounts or the precision of articulated arms, handheld devices allow researchers to collect data in areas that are difficult to reach or in situations where rapid assessment is required (<xref ref-type="bibr" rid="B110">Paulus, 2019</xref>). The portability of handheld mechanisms makes them ideal for exploratory research or situations in which the sensor must be moved frequently across different plants or plots (<xref ref-type="bibr" rid="B110">Paulus, 2019</xref>). <xref ref-type="bibr" rid="B163">Zermas et&#xa0;al. (2020)</xref> demonstrated the use of high-resolution RGB imagery collected with a handheld camera and UAV for 3D model processing in corn phenotyping. Their methodology involved structure from motion in reconstructing 3D canopies of small groups of corn plants, allowing for the automated extraction of phenotypic characteristics such as plant height, leaf area index (LAI), and individual leaf length. This approach provides accurate and frequent statistics for the in-season assessment of crop traits, enhancing the evaluation of crop performance and yield optimization. The handheld approach is particularly advantageous for capturing data from different parts of the plant, such as the lower canopy or areas that are not easily accessible by more extensive systems.</p>
</sec>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Sensor carrier platforms</title>
<p>Sensor carrier platforms refer to mobile systems used to transport and/or dynamically position sensors within different environments for data collection. In 3D precision crop management, the effective deployment and maneuvering of sensors across the target environment is vital for capturing high-quality phenotypic data. Sensor carrier platforms are critical in this process, providing the mobility, stability, and coverage required to obtain comprehensive datasets. These platforms range from ground-based systems, such as wheeled robots and tractors, to aerial platforms, such as drones, each offering unique advantages depending on the specific requirements of the phenotyping task. The choice of carrier platform impacts the resolution, efficiency, and scalability of data collection, making it a key factor in the design of any phenotyping strategy. Building upon sensor mounting, this subsection examines the various sensor carrier platforms (<xref ref-type="fig" rid="f7"><bold>Figure&#xa0;7</bold></xref>) used in both CCP and FCP environments, highlighting their technical capabilities and applications in crop management.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>Common sensor platforms used in 3D phenotyping: <bold>(A)</bold> unmanned aerial vehicle (UAV), <bold>(B)</bold> quadruped robot, <bold>(C)</bold> wheeled robot, <bold>(D)</bold> treaded robot, <bold>(E)</bold> tractor, and <bold>(F)</bold> railed track (ground and overhead).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1731852-g007.tif">
<alt-text content-type="machine-generated">(a) Illustration of a drone with propellers and a camera mounted underneath. (b) Robotic dog with four articulated legs. (c) Automated system planting or manipulating soil. (d) Tracked robotic vehicle. (e) Tractor equipped with a sensor. (f) Two views of an automated farming system, one with a framework and one showing rows of crops.</alt-text>
</graphic></fig>
<sec id="s3_2_1">
<label>3.2.1</label>
<title>Drone (unmanned aerial vehicle, UAV)</title>
<p>In recent years, drones or UAVs have become one of the most widely used sensor carrier platforms in precision agriculture, enabling rapid large-scale data acquisition from the air. Equipped with 3D imaging sensors such as photogrammetry or LiDAR, drones can capture high-resolution data across extensive areas, providing detailed insights into plant canopy structure, biomass, and spatial variability. Their ability to cover large plots quickly and efficiently makes them particularly useful for field phenotyping in outdoor environments, especially when frequent data updates are necessary. For example, <xref ref-type="bibr" rid="B169">Zhu et&#xa0;al. (2023)</xref> demonstrated the use of extremely low-altitude UAV images for the quantitative estimation of organ-scale phenotypic parameters of field crops through 3D modeling. Their approach allowed for precise measurements of plant traits, such as leaf area and plant height, which are crucial for crop breeding and management practices. This study highlighted the potential of UAV technology to enhance the accuracy and efficiency of phenotyping processes. For a comprehensive review of drone-based imaging sensors, techniques, and applications in plant phenotyping, readers are referred to <xref ref-type="bibr" rid="B36">Gano et&#xa0;al. (2024)</xref>, which provides an extensive analysis of the current state and future trends of UAV-based plant phenotyping.</p>
</sec>
<sec id="s3_2_2">
<label>3.2.2</label>
<title>Wheeled and treaded robots</title>
<p>Wheeled and treaded (also known as tracked) robots are ground-based platforms used primarily in field environments for precision phenotyping applications. These autonomous or semi-autonomous systems can carry 3D imaging sensors at ground level, providing detailed spatial data on plant height, morphology, and structure. Wheeled robots are particularly effective for capturing close-range data, unlike aerial systems, especially for lower plant parts, such as stems and root zones. <xref ref-type="bibr" rid="B50">Iqbal et&#xa0;al. (2020a)</xref> developed a multipurpose autonomous differential drive mobile robot, MARIA, for plant phenotyping and soil sensing. The robot was designed to navigate autonomously using a global navigation satellite system (GNSS). It was fitted with an actuated LiDAR unit and depth camera to estimate plant morphological traits, such as volume and height. The robot&#x2019;s three-degree-of-freedom manipulator allowed soil sensing and sampling, making it a versatile tool for phenotyping and soil analysis. Similarly, <xref ref-type="bibr" rid="B155">Xiang et&#xa0;al. (2023)</xref> demonstrated the use of a wheeled robot, PhenoBot, for field-based robotic leaf angle detection and characterization of maize plants using stereo vision and deep convolutional neural networks. The robot was equipped with PhenoStereo cameras to capture side-view images of maize plants, allowing precise measurements of leaf angles and other phenotypic traits. This approach significantly improved the efficiency and accuracy of phenotyping compared with traditional manual methods.</p>
</sec>
<sec id="s3_2_3">
<label>3.2.3</label>
<title>Quadruped robotics</title>
<p>Quadruped robots are a newer class of sensor-carrier platforms designed to navigate complex and rugged terrains with greater flexibility and stability than wheeled or treaded robots (<xref ref-type="bibr" rid="B60">Katz et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B86">Lopes et&#xa0;al., 2023</xref>). These four-legged platforms can carry 3D sensors to capture detailed spatial data in environments where mobility is challenging, such as fields with dense vegetation and uneven ground. Their ability to traverse rough terrain makes them particularly valuable in outdoor agricultural settings, where precise data collection is required, but other robotic platforms may struggle. <xref ref-type="bibr" rid="B86">Lopes et&#xa0;al. (2023)</xref> discussed advancements in quadruped robotics, highlighting their applications in agricultural environments. According to their discussion, quadruped robots offer several advantages over traditional wheeled or treaded robots, including the ability to maintain stability on rough and uneven terrain, adaptability to different ground conditions, and enhanced maneuverability in tight spaces. The authors added that these robots are beneficial for phenotyping tasks in challenging field conditions, where other platforms may struggle. They can carry a variety of sensors, including LiDAR, cameras, and multispectral imaging systems, to collect high-resolution 3D crop data. Their study also detailed the design and development of a robotic arm specifically built to integrate with a quadruped robot for use in various agricultural applications. Quadruped robots can operate autonomously or be remotely controlled, making them versatile tools for detailed and accurate phenotyping in diverse environments (<xref ref-type="bibr" rid="B86">Lopes et&#xa0;al., 2023</xref>).</p>
</sec>
<sec id="s3_2_4">
<label>3.2.4</label>
<title>Tractors</title>
<p>Tractors are a common platform for deploying sensors in large-scale agricultural settings, often serving as sensor carriers in precision farming. Mounted with 3D imaging systems, tractors enable data collection while performing other agricultural operations, such as planting or harvesting. For instance, <xref ref-type="bibr" rid="B65">Kise and Zhang (2008)</xref> developed a field-sensing system capable of performing 3D field mapping to measure crop height and volume and detect crop rows in 3D for reliable tractor guidance using a tractor-mounted stereo camera. The core of this dual-application field-sensing system is a stereovision-based mapping method. This method creates 3D crop structure maps by estimating the motion of a tractor-mounted stereo camera and progressively stitching the constituent stereo images. In a similar study, <xref ref-type="bibr" rid="B137">Sun et&#xa0;al. (2018)</xref> developed a high-throughput phenotyping system mounted on a tractor to scan plants from overhead using 2D LiDAR and RTK-GPS for precise spatial positioning. The system effectively reconstructs 3D models of crops by separating the ground plane and removing noise from weeds to generate clean 3D surface models of cotton plants. This setup allows for the measurement of key morphological traits, such as canopy height, projected canopy area, and plant volume, directly from the tractor, demonstrating its utility in large-scale agricultural settings. The ability to repeatedly scan entire fields over a growing season highlights the capability of tractor-mounted systems for efficient and accurate data collection, which is essential for modern crop breeding and management practices.</p>
</sec>
<sec id="s3_2_5">
<label>3.2.5</label>
<title>Ground and overhead rails</title>
<p>Ground and overhead rail systems are stationary or semi-stationary platforms used primarily in controlled environments, such as greenhouses or growth chambers. These systems allow sensors to move along fixed paths, capturing detailed 3D data over time without disturbing plants. Ground rails are typically used for lower- or mid-level plant phenotyping, whereas overhead rails offer a bird&#x2019;s-eye view, which is ideal for capturing canopy structure and overall plant growth patterns. These systems are highly effective in environments that require continuous noninvasive monitoring, allowing consistent data capture with minimal human intervention. <xref ref-type="bibr" rid="B78">Li et&#xa0;al. (2023)</xref> utilized a hybrid (ground and overhead) design for a field rail-based phenotyping platform to collect high-throughput, time-series raw data of maize populations using LiDAR and RGB cameras. An earlier study by <xref ref-type="bibr" rid="B142">Vadez et&#xa0;al. (2015)</xref> utilized a similar novel arrangement in a greenhouse setting to combine 3D imaging and lysimetry for the high-throughput phenotyping of traits controlling the plant water budget. This system was designed to generate 3D crop structure maps, allowing for the accurate extraction of phenotypic traits.</p>
<p>Another notable approach is the use of a cable-suspended multi-sensor system to achieve similar novel concepts in rail-based systems, such as those in the studies by <xref ref-type="bibr" rid="B64">Kirchgessner et&#xa0;al. (2016)</xref> and <xref ref-type="bibr" rid="B5">Bai et&#xa0;al. (2019)</xref>. These rail- and cable-based approaches enable precise measurements of plant height and volume, demonstrating the effectiveness of multi-source data fusion in improving the accuracy of phenotypic trait extraction.</p>
</sec>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Platform comparison and selection criteria</title>
<p>The preceding subsections have detailed individual mounting mechanisms and carrier platforms; however, selecting an appropriate platform for a given phenotyping application requires a systematic comparison across multiple criteria. <xref ref-type="table" rid="T2"><bold>Table&#xa0;2</bold></xref> synthesizes the key characteristics of the major platform categories to facilitate this selection process by comparing platforms across five dimensions: mobility and maneuverability, spatial coverage and throughput, measurement stability, cost considerations, and sensor compatibility.</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Platform comparison for 3D phenotyping applications.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Platform category</th>
<th valign="middle" align="center">Mobility</th>
<th valign="middle" align="center">Coverage (ha/day)</th>
<th valign="middle" align="center">Stability</th>
<th valign="middle" align="center">Capital cost</th>
<th valign="middle" align="center">Operating cost</th>
<th valign="middle" align="center">Compatible sensors</th>
<th valign="middle" align="center">Best for</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">Fixed Gantry</td>
<td valign="middle" align="center">None (fixed envelope)</td>
<td valign="middle" align="center">&lt;1 (but high temporal frequency)</td>
<td valign="middle" align="center">Very High (&lt;1&#xa0;mm)</td>
<td valign="middle" align="center">Very High ($500K&#x2013;2M)</td>
<td valign="middle" align="center">Low</td>
<td valign="middle" align="center">All sensor types; multi-sensor arrays</td>
<td valign="middle" align="center">Long-term plot monitoring, method development</td>
</tr>
<tr>
<td valign="middle" align="center">Tractor-mounted</td>
<td valign="middle" align="center">Moderate (row-constrained)</td>
<td valign="middle" align="center">10&#x2013;30</td>
<td valign="middle" align="center">Medium (5&#x2013;10 cm)</td>
<td valign="middle" align="center">Medium ($20K&#x2013;100K + vehicle)</td>
<td valign="middle" align="center">Medium</td>
<td valign="middle" align="center">LiDAR, RGB, multispectral, ToF</td>
<td valign="middle" align="center">Large-field canopy phenotyping</td>
</tr>
<tr>
<td valign="middle" align="center">Wheeled Robot</td>
<td valign="middle" align="center">Moderate (terrain-limited)</td>
<td valign="middle" align="center">5&#x2013;15</td>
<td valign="middle" align="center">Medium (2&#x2013;5 cm)</td>
<td valign="middle" align="center">Medium ($20K&#x2013;80K)</td>
<td valign="middle" align="center">Medium</td>
<td valign="middle" align="center">LiDAR, RGB, ToF, structured light</td>
<td valign="middle" align="center">Research plots, row crops</td>
</tr>
<tr>
<td valign="middle" align="center">Tracked Robot</td>
<td valign="middle" align="center">Good (rough terrain)</td>
<td valign="middle" align="center">3&#x2013;10</td>
<td valign="middle" align="center">Medium (5&#x2013;10 cm)</td>
<td valign="middle" align="center">High ($50K&#x2013;120K)</td>
<td valign="middle" align="center">Medium</td>
<td valign="middle" align="center">LiDAR, RGB, multispectral</td>
<td valign="middle" align="center">Challenging terrain, orchards</td>
</tr>
<tr>
<td valign="middle" align="center">Legged Robot</td>
<td valign="middle" align="center">Very Good (all terrain)</td>
<td valign="middle" align="center">2&#x2013;8</td>
<td valign="middle" align="center">Medium (5&#x2013;15 cm)</td>
<td valign="middle" align="center">Very High ($75K&#x2013;150K)</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">RGB, LiDAR (payload-limited)</td>
<td valign="middle" align="center">Complex environments, demonstration</td>
</tr>
<tr>
<td valign="middle" align="center">UAV (Consumer)</td>
<td valign="middle" align="center">Excellent</td>
<td valign="middle" align="center">50&#x2013;100 (RGB)</td>
<td valign="middle" align="center">Low (10&#x2013;20 cm)</td>
<td valign="middle" align="center">Low ($1K&#x2013;5K)</td>
<td valign="middle" align="center">Low</td>
<td valign="middle" align="center">RGB only</td>
<td valign="middle" align="center">Rapid canopy assessment, NDVI</td>
</tr>
<tr>
<td valign="middle" align="center">UAV (Professional)</td>
<td valign="middle" align="center">Excellent</td>
<td valign="middle" align="center">30&#x2013;80 (multispectral)</td>
<td valign="middle" align="center">Medium (5&#x2013;15 cm)</td>
<td valign="middle" align="center">Medium ($15K&#x2013;50K)</td>
<td valign="middle" align="center">Medium</td>
<td valign="middle" align="center">RGB, LiDAR</td>
<td valign="middle" align="center">Breeding trials, stress detection</td>
</tr>
<tr>
<td valign="middle" align="center">UAV (Heavy-lift)</td>
<td valign="middle" align="center">Excellent</td>
<td valign="middle" align="center">10&#x2013;30 (LiDAR)</td>
<td valign="middle" align="center">Medium (5&#x2013;10 cm)</td>
<td valign="middle" align="center">High ($50K&#x2013;200K)</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">LiDAR, RGB</td>
<td valign="middle" align="center">High-resolution structural phenotyping</td>
</tr>
<tr>
<td valign="middle" align="center">Backpack/Handheld</td>
<td valign="middle" align="center">Operator-dependent</td>
<td valign="middle" align="center">1&#x2013;5</td>
<td valign="middle" align="center">Low (10&#x2013;30 cm)</td>
<td valign="middle" align="center">Low-Medium ($5K&#x2013;50K)</td>
<td valign="middle" align="center">Low</td>
<td valign="middle" align="center">LiDAR, RGB, handheld scanners</td>
<td valign="middle" align="center">Targeted sampling, validation</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>Coverage estimates assume standard operating conditions and typical sensor configurations. Capital costs include platform and basic sensor payload; high-end sensors may substantially increase total system cost. Stability reflects typical effective positioning accuracy under operational conditions. Operating costs consider labor, consumables, and maintenance but exclude data processing. UAV operations are subject to regulatory constraints that vary by jurisdiction.</p></fn>
</table-wrap-foot>
</table-wrap>
<sec id="s3_3_1">
<label>3.3.1</label>
<title>Mobility and maneuverability</title>
<p>Platform mobility fundamentally constrains the types of environments and crops that can be phenotyped. Fixed gantry systems offer no mobility but provide precisely controlled sensor positioning within their operational envelope, making them optimal for repeated measurements of the same experimental plots over time. Ground-based mobile platforms (tractors, wheeled robots, and tracked vehicles) provide moderate mobility, which is constrained by row spacing, soil conditions, and crop height. Legged robots offer superior terrain adaptability but at a substantially higher cost and complexity. UAV platforms provide maximum mobility and can access crops at any growth stage, although flight time limitations (typically 20 min&#x2013;40 min) constrain single-mission coverage.</p>
</sec>
<sec id="s3_3_2">
<label>3.3.2</label>
<title>Spatial coverage and throughput</title>
<p>Throughput requirements vary dramatically between breeding trials (requiring the phenotyping of thousands of plots) and physiological studies (requiring the detailed characterization of individual plants). UAV platforms achieve the highest throughput for canopy-level traits, capable of covering 50 ha&#x2013;100 ha per day with RGB photogrammetry or 10 ha&#x2013;30 ha with heavier LiDAR payloads (<xref ref-type="bibr" rid="B131">Shi et&#xa0;al., 2016</xref>). Ground-based mobile platforms achieve intermediate throughput (5 ha&#x2013;20 ha per day, depending on driving speed and row spacing) while maintaining a higher spatial resolution. Fixed gantry systems have inherently limited coverage but enable the highest temporal resolution through the automated and repeated scanning of the same plots.</p>
</sec>
<sec id="s3_3_3">
<label>3.3.3</label>
<title>Measurement stability</title>
<p>Stability directly affects the achievable measurement precision. Fixed gantry systems provide the highest stability, with a sensor positioning repeatability typically below 1&#xa0;mm. Ground-based platforms introduce vibration and position uncertainty that can be partially compensated for through gimbal stabilization and RTK-GNSS positioning, achieving effective stability of 1 cm&#x2013;5 cm. UAV platforms face the greatest stability challenges due to wind effects, GPS drift, and gimbal limitations, with effective positioning stability typically 5 cm&#x2013;20 cm, depending on the conditions and equipment quality.</p>
</sec>
<sec id="s3_3_4">
<label>3.3.4</label>
<title>Cost considerations</title>
<p>The platform costs span several orders of magnitude. Consumer UAVs with integrated RGB cameras represent the lowest-cost entry point (&lt;$2,000), while research-grade UAV-LiDAR systems range from $50,000 to $200,000. Ground-based robotic platforms range from $20,000 for simple wheeled systems to $100,000+ for advanced legged robots. Fixed gantry systems represent the highest capital investment ($500,000&#x2013;$2,000,000 for field-scale installations) but offer the lowest per-measurement operational costs for long-term studies. Importantly, the initial platform cost often represents a minority of the total phenotyping costs when labor, data processing, and maintenance are considered.</p>
</sec>
<sec id="s3_3_5">
<label>3.3.5</label>
<title>Sensor compatibility</title>
<p>Platform payload capacity constrains the sensor options. UAVs face the most severe limitations, with consumer-grade systems (&lt;5&#xa0;kg payload) restricted to RGB cameras and lightweight multispectral sensors, whereas larger UAVs (5 kg&#x2013;15 kg payload) can accommodate LiDAR or hyperspectral sensors. Ground-based platforms typically support all sensor types without significant payload constraints. Fixed gantry systems offer maximum flexibility for multi-sensor integration, commonly deploying arrays of RGB, hyperspectral, thermal, and LiDAR sensors simultaneously.</p>
<p>The optimal platform choice depends on the specific balance of these factors for each application of interest. UAV-based systems offer the best cost-effectiveness for large-scale breeding programs that prioritize throughput over precision. For detailed physiological studies requiring organ-level measurements, ground-based systems or fixed gantries provide the necessary precision. For phenotyping in challenging environments (sloped terrain, young crops, post-lodging), legged robots or handheld systems may be the only viable solutions.</p>
</sec>
</sec>
</sec>
<sec id="s4">
<label>4</label>
<title>Geometry phenotypes</title>
<p>Selecting appropriate phenotypic traits is critical for leveraging modern sensor technology in both CCP and FCP phenotyping environments. The target end users of this information, including breeders and farmers, may require different, yet accurate and reliable data to assess and/or inform decisions on crop improvement and management strategies. This section reviews the key morphological and geometric traits of crops, which are divided into two main categories: Canopy Architecture and Root Architecture (<xref ref-type="fig" rid="f8"><bold>Figure&#xa0;8</bold></xref>). Each architectural characteristic is discussed in terms of its relevance, measurability, and challenges posed by CCP and FCP environments.</p>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>Variation in canopy and root architecture among common crop plants: <bold>(A)</bold> soybean, <bold>(B)</bold> rice, <bold>(C)</bold> corn, <bold>(D)</bold> ginseng, and <bold>(E)</bold> onion.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1731852-g008.tif">
<alt-text content-type="machine-generated">Five plant types illustrating canopy and root systems. (a) Broadleaf canopy with fibrous roots, (b) grass-like canopy with fibrous roots, (c) elongated canopy with extensive root network, (d) simple leaf canopy with taproot, (e) bulb with sparse roots. Each plant shows distinct canopy and root characteristics.</alt-text>
</graphic></fig>
<sec id="s4_1">
<label>4.1</label>
<title>Canopy architecture</title>
<p>Canopy architecture refers to the spatial configuration of a plant&#x2019;s aboveground organs, encompassing traits such as plant height, tillering, leaf area index, and the overall arrangement of leaves and branches (<xref ref-type="bibr" rid="B30">Fageria et&#xa0;al., 2006</xref>). These traits play a pivotal role in determining how effectively a crop intercepts light, utilizes resources, and withstands environmental stresses, which are critical for optimizing crop yields and efficiency. For breeders and farmers, understanding and optimizing canopy architecture is essential for improving genotype performance and enabling precision crop management.</p>
<p>The architecture of a plant&#x2019;s canopy is intrinsically linked to its ability to intercept solar radiation, which drives photosynthesis and ultimately determines crop productivity (<xref ref-type="bibr" rid="B162">Zegada-Lizarazu et&#xa0;al., 2012</xref>). According to <xref ref-type="bibr" rid="B30">Fageria et&#xa0;al. (2006)</xref>, key traits such as plant height, leaf orientation, and tillering influence the distribution of light within the canopy, thereby affecting photosynthetic efficiency and resource use. For instance, a canopy&#x2019;s stem characteristics, which include stem height, branching pattern, internode length, and stem diameter, are crucial for determining a plant&#x2019;s growth, stability, and overall productivity. Taller plants with robust stems and erect leaves capture more sunlight, particularly in dense planting conditions, where lower leaves may otherwise be shaded. This increased light interception is vital for photosynthetic efficiency, driving better crop performance (<xref ref-type="bibr" rid="B30">Fageria et&#xa0;al., 2006</xref>; <xref ref-type="bibr" rid="B29">Evans, 2013</xref>). However, excessive stem height can lead to a higher risk of lodging, where plants may bend or collapse under their weight or due to external forces such as wind, significantly reducing crop yield and quality. In contrast, shorter plants with compact canopies may be preferred in environments prone to lodging because they are less likely to be damaged by strong winds or heavy rain. Similarly, a well-structured branching and tillering pattern enhances light penetration throughout the canopy, further promoting plant productivity. Conversely, excessive branching and/or tillering can lead to self-shading, which reduces light availability to the lower leaves and potentially hinders growth. 
Moreover, after panicle development in most cereal crops, competition for photoassimilates begins between panicles and tillers (<xref ref-type="bibr" rid="B30">Fageria et&#xa0;al., 2006</xref>).</p>
<p>Internode length and stem diameter also play key roles in the canopy architecture. Shorter internodes result in a more compact plant structure, which can be advantageous in environments where space is limited or where plants must resist lodging (<xref ref-type="bibr" rid="B19">Dahiya et&#xa0;al., 2018</xref>). However, longer internodes might improve light capture and increase lodging risk. A thicker stem provides greater mechanical support, reducing the risk of lodging and enabling the plant to support larger reproductive structures, such as fruits or grain heads. Additionally, stem diameter is associated with the plant&#x2019;s capacity for nutrient and water transport, which is critical for sustaining growth and development under varying environmental conditions (<xref ref-type="bibr" rid="B30">Fageria et&#xa0;al., 2006</xref>).</p>
<p>Likewise, a canopy&#x2019;s leaf characteristics, including leaf erectness (and/or angle), length, width, and thickness, play crucial roles in determining the overall yield potential of plant species or cultivars. Erect leaf orientation is critical, as it allows for greater light penetration and a more even distribution of sunlight within the crop canopy, thereby enhancing photosynthetic efficiency and increasing yield. This trait is often associated with higher-yielding varieties, as erect leaves reduce shading on lower leaves, enabling a more effective use of light (<xref ref-type="bibr" rid="B30">Fageria et&#xa0;al., 2006</xref>). <xref ref-type="bibr" rid="B30">Fageria et&#xa0;al. (2006)</xref> also argue that leaf thickness, which correlates with higher chlorophyll density per unit area, is linked to increased photosynthetic capacity and long-term gains in crop productivity. The size and angle of leaves are also significant; shorter, more erect leaves tend to distribute light more evenly and are less prone to drooping, which can reduce photosynthetic efficiency in taller cultivars than in shorter ones.</p>
<p>Although less variable than length, leaf width also contributes to yield by influencing the distribution of leaves within the canopy of the plant. Narrow leaves are generally preferred because they allow for more uniform light distribution, minimizing shading and maximizing photosynthesis across the plant. Other important but non-morphological leaf characteristics include toughness, which is essential for preventing damage from wind and rain; color, an important indicator of plant health and nutrient status, with darker green leaves typically reflecting higher chlorophyll content and greater photosynthetic activity; and senescence, the process of leaf aging that impacts the duration of photosynthetic activity. Early or rapid senescence can significantly reduce crop productivity by decreasing the photosynthetically active leaf area before the plant reaches its full yield potential (<xref ref-type="bibr" rid="B30">Fageria et&#xa0;al., 2006</xref>).</p>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>Root architecture</title>
<p>Root architecture, which refers to the spatial configuration of a plant&#x2019;s root system, is a critical aspect of crop morphology that significantly influences the physiological aspects of plant growth and yield. The architecture includes key traits such as root length, diameter, surface area, and the distribution of root hairs, which collectively determine the plant&#x2019;s ability to absorb water and nutrients from the soil. Both genetic factors and environmental conditions, such as soil type, moisture levels, and nutrient availability, shape the complexity and dynamics of root systems. For example, the effectiveness of a root system in nutrient uptake, particularly for relatively immobile nutrients such as phosphorus, is heavily dependent on the root&#x2019;s surface area and its ability to efficiently explore soil volume (<xref ref-type="bibr" rid="B139">Takahashi and Pradal, 2021</xref>).</p>
<p>The root system plays a multifaceted role in supporting the plant by anchoring it, providing mechanical stability, and facilitating the absorption and transport of water, nutrients, and growth hormones to the shoots. Root architecture is also vital for plant responses to environmental stresses, such as drought or nutrient deficiencies. Plants with well-developed root systems are typically more resilient, as they can access deeper soil layers where water and nutrients are more abundant (<xref ref-type="bibr" rid="B30">Fageria et&#xa0;al., 2006</xref>). Despite its importance, root architecture has historically been studied less than aboveground plant structures because of the challenges involved in accessing and analyzing roots <italic>in situ</italic>. A more detailed discussion of root phenotyping and approaches can be found in <xref ref-type="bibr" rid="B91">Lynch (2022)</xref>, <xref ref-type="bibr" rid="B139">Takahashi and Pradal (2021)</xref>, and <xref ref-type="bibr" rid="B151">Wasaya et&#xa0;al. (2018)</xref>.</p>
</sec>
<sec id="s4_3">
<label>4.3</label>
<title>Technology-trait suitability mapping</title>
<p>The selection of an appropriate 3D sensing technology for phenotyping specific architectural traits depends on multiple factors, including the spatial scale of measurement (organ, plant, canopy, or population level), required measurement precision, operational environment (CCP <italic>vs</italic>. FCP), and practical constraints, including cost, throughput, and technical expertise. This subsection provides explicit guidance for matching the sensing technologies reviewed in Section 2 with the phenotypic traits discussed above, addressing a critical decision point for researchers and practitioners designing phenotyping workflows.</p>
<sec id="s4_3_1">
<label>4.3.1</label>
<title>Scale-dependent technology selection</title>
<p>Phenotypic traits can be conceptualized across a hierarchy of spatial scales, each requiring different sensing approaches:</p>
<p><italic>Organ-level traits</italic> (leaf dimensions, leaf angle, petal thickness, and internode length) require high spatial resolution and typically benefit from close-range sensing. Laser triangulation (LTS) is optimal for this scale in CCP environments, achieving micrometer-level precision that is suitable for detecting subtle morphological differences (<xref ref-type="bibr" rid="B112">Paulus et&#xa0;al., 2014</xref>). Structured light (SL) offers a lower-cost alternative with slightly reduced precision but faster acquisition times (<xref ref-type="bibr" rid="B98">Nam et&#xa0;al., 2014</xref>). For FCP applications at this scale, sensing options are limited; high-resolution MVS from ground-based platforms or ToF cameras integrated with robotic systems offer the most practical solutions, although with reduced accuracy compared to CCP alternatives.</p>
<p><italic>Plant-level traits</italic> (plant height, stem diameter, branching pattern, and tiller count) are the most common phenotyping targets and are accessible to a broader range of technologies. In the CCP, all six reviewed technologies can address this scale, with the MVS offering the best balance of accuracy, cost, and throughput. In FCP, TLS (particularly mobile and backpack configurations) and MVS (both ground- and UAV-based) provide practical solutions with centimeter-level accuracy that is sufficient for breeding applications.</p>
<p><italic>Canopy-level traits</italic> (canopy height, canopy cover, LAI, and canopy volume) require the coverage of larger areas and benefit from elevated sensing platforms. UAV-based MVS and LiDAR are the dominant technologies for FCP at this scale, capable of phenotyping hundreds of plots per day with an accuracy adequate for genetic analysis (R&#xb2; &gt;0.85 for height; <xref ref-type="bibr" rid="B93">Madec et&#xa0;al., 2017</xref>). Ground-based TLS can achieve higher precision but with reduced throughput. In the CCP, gantry-mounted LTS systems (e.g., Field Scanalyzer) combine the advantages of a controlled sensing geometry with plot-level coverage.</p>
<p><italic>Population-level traits</italic> (spatial distribution patterns, lodging assessment, and growth uniformity) require the largest spatial coverage and are predominantly the domain of UAV-based sensing. At this scale, MVS and LiDAR from UAV platforms provide the only practical solutions for both CCP and FCP, although the distinction between these environments becomes less meaningful when assessing population-scale phenomena.</p>
</sec>
<sec id="s4_3_2">
<label>4.3.2</label>
<title>Technology-trait suitability matrix</title>
<p><xref ref-type="table" rid="T3"><bold>Table&#xa0;3</bold></xref> provides a structured mapping of the technology suitability for specific phenotypic traits across CCP and FCP environments. Suitability ratings reflect the synthesis of published validation studies, considering both achievable accuracy and practical deployment.</p>
<table-wrap id="T3" position="float">
<label>Table&#xa0;3</label>
<caption>
<p>Technology-trait suitability matrix for 3D plant phenotyping.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Trait category</th>
<th valign="middle" align="center">Specific trait</th>
<th valign="middle" align="center">LTS</th>
<th valign="middle" align="center">MVS</th>
<th valign="middle" align="center">ToF</th>
<th valign="middle" align="center">TLS</th>
<th valign="middle" align="center">SL</th>
<th valign="middle" align="center">LF</th>
<th valign="middle" align="center">Optimal scale</th>
<th valign="middle" align="center">CCP suitability</th>
<th valign="middle" align="center">FCP suitability</th>
</tr>
</thead>
<tbody>
<tr>
<th valign="middle" colspan="11" align="center">Canopy architecture traits</th>
</tr>
<tr>
<td valign="middle" rowspan="5" align="center">Stem traits</td>
<td valign="middle" align="left">Plant height</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Plant/Canopy</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">High</td>
</tr>
<tr>
<td valign="middle" align="left">Stem diameter</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Organ/Plant</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">Medium</td>
</tr>
<tr>
<td valign="middle" align="left">Internode length</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Organ</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">Low</td>
</tr>
<tr>
<td valign="middle" align="left">Branching pattern</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Plant</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">Medium</td>
</tr>
<tr>
<td valign="middle" align="left">Tiller count</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Plant</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">Medium</td>
</tr>
<tr>
<td valign="middle" rowspan="4" align="center">Leaf traits</td>
<td valign="middle" align="left">Leaf area</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Organ/Plant</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">Medium</td>
</tr>
<tr>
<td valign="middle" align="left">Leaf angle</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="center">Organ</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">Low</td>
</tr>
<tr>
<td valign="middle" align="left">Leaf length/width</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Organ</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">Low</td>
</tr>
<tr>
<td valign="middle" align="left">Leaf thickness</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Organ</td>
<td valign="middle" align="center">Medium</td>
<td valign="middle" align="center">Not feasible</td>
</tr>
<tr>
<td valign="middle" rowspan="5" align="center">Canopy traits</td>
<td valign="middle" align="left">Canopy height</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Canopy</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">High</td>
</tr>
<tr>
<td valign="middle" align="left">Canopy volume</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Canopy</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">High</td>
</tr>
<tr>
<td valign="middle" align="left">Canopy cover</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Canopy/Pop</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">High</td>
</tr>
<tr>
<td valign="middle" align="left">LAI</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Canopy</td>
<td valign="middle" align="center">Medium</td>
<td valign="middle" align="center">Medium</td>
</tr>
<tr>
<td valign="middle" align="left">Light interception</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Canopy</td>
<td valign="middle" align="center">Medium</td>
<td valign="middle" align="center">Medium</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">Reproductive</td>
<td valign="middle" align="left">Ear/panicle volume</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Organ</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">Low</td>
</tr>
<tr>
<td valign="middle" align="left">Fruit count</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Plant</td>
<td valign="middle" align="center">Medium</td>
<td valign="middle" align="center">Low</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">Stress response</td>
<td valign="middle" align="left">Lodging assessment</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Canopy/Pop</td>
<td valign="middle" align="center">Low</td>
<td valign="middle" align="center">High</td>
</tr>
<tr>
<td valign="middle" align="left">Wilting detection</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Plant</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">Medium</td>
</tr>
<tr>
<th valign="middle" colspan="11" align="center">Root architecture traits</th>
</tr>
<tr>
<td valign="middle" rowspan="4" align="center">Root traits</td>
<td valign="middle" align="left">Root length</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Organ</td>
<td valign="middle" align="center">High (rhizotron)</td>
<td valign="middle" align="center">Not feasible*</td>
</tr>
<tr>
<td valign="middle" align="left">Root diameter</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x2022;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Organ</td>
<td valign="middle" align="center">High (rhizotron)</td>
<td valign="middle" align="center">Not feasible*</td>
</tr>
<tr>
<td valign="middle" align="left">Root angle</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Organ</td>
<td valign="middle" align="center">Medium</td>
<td valign="middle" align="center">Not feasible*</td>
</tr>
<tr>
<td valign="middle" align="left">Root system architecture</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x2022;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x2022;&#x25cb;&#x25cb;</td>
<td valign="middle" align="left">&#x25cb;&#x25cb;&#x25cb;</td>
<td valign="middle" align="center">Plant</td>
<td valign="middle" align="center">High (CT/MRI)</td>
<td valign="middle" align="center">Low (GPR/ERT)*</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>&#x2022;&#x2022;&#x2022;Highly suitable (validated accuracy, practical deployment); &#x2022;&#x2022;&#x25cb;Moderately suitable (achievable with limitations); &#x2022;&#x25cb;&#x25cb;Limited suitability (significant constraints); &#x25cb;&#x25cb;&#x25cb;Not suitable or not validated.</p></fn>
<fn>
<p>CCP/FCP ratings: High/Medium/Low/Not feasible</p></fn>
<fn>
<p>*Field root phenotyping requires specialized technologies (GPR, ERT, excavation) not covered in this review&#x2019;s scope.</p></fn>
<fn>
<p>Suitability ratings synthesize published validation studies and practical considerations. Technology abbreviations: LTS, Laser Triangulation; MVS, Multiview Stereo; ToF, Time-of-Flight; TLS, Terrestrial Laser Scanning; SL, Structured Light; LF, Light Field. Optimal scale indicates the spatial resolution at which each trait is typically assessed. CCP and FCP suitability reflect both achievable accuracy and practical deployment constraints in each environment.</p></fn>
</table-wrap-foot>
</table-wrap>
</sec>
<sec id="s4_3_3">
<label>4.3.3</label>
<title>CCP versus FCP trait measurability</title>
<p>A critical consideration in phenotyping workflow design is that the range of measurable traits differs substantially between the CCP and FCP environments. Under controlled conditions, fine-scale organ traits (such as leaf thickness, leaf surface texture, and small reproductive structures) are accessible using high-precision sensing technologies. These measurements are often not achievable under field conditions because of the reduced resolution at greater sensing distances, environmental interference, and the inability to isolate individual organs within dense canopies.</p>
<p>Conversely, certain traits manifest differently or are only meaningful under specific field conditions. For example, lodging susceptibility requires wind and water stress in field environments. CCP assessments of lodging-related traits (stem diameter, plant height, and root anchorage) provide only indirect indicators of the actual lodging risk. Similarly, canopy-level light interception dynamics, competitive plant&#x2013;plant interactions, and responses to natural stress gradients require FCP assessment for agronomically relevant characterization.</p>
<p>This complementarity reinforces the need for integrated phenotyping strategies that leverage CCP for high-precision organ-level characterization and algorithm development while using FCP to validate trait expression and assess genotype-by-environment interactions under realistic conditions (<xref ref-type="bibr" rid="B116">Poorter et&#xa0;al., 2016</xref>). The technology-trait mapping provided in this study can guide researchers in selecting appropriate sensing solutions for each component of the integrated workflows.</p>
<p>As the technology-trait mapping above illustrates, quantifying both canopy and root architecture remains a significant challenge in plant phenotyping, particularly under field conditions, owing to the inherent complexity and variability of plant structures. The suitability ratings in <xref ref-type="table" rid="T3"><bold>Table&#xa0;3</bold></xref> reflect not only the sensor capabilities but also the practical constraints that emerge when transitioning from controlled to field environments in real-world applications. With its intricate interplay of traits, canopy architecture presents difficulties in measurement because of environmental factors such as light variability and wind, as well as plant density and dynamic spacing, in field conditions. Similarly, root architecture is notoriously difficult to assess because of its hidden nature, soil heterogeneity, and the destructive nature of traditional excavation methods. While manual measurement remains common in both cases, it is labor-intensive, time-consuming, and lacks precision.</p>
<p>However, recent advancements in 3D phenotyping technologies have revolutionized the study of canopy and root structures, offering non-invasive and scalable solutions that provide high-resolution data. In CCP environments, technologies such as LiDAR (<xref ref-type="bibr" rid="B16">Cao et&#xa0;al., 2017</xref>), photogrammetry (<xref ref-type="bibr" rid="B37">Gao et&#xa0;al., 2024</xref>; D. <xref ref-type="bibr" rid="B158">Yang et&#xa0;al., 2024</xref>), rhizotrons (<xref ref-type="bibr" rid="B84">Lobet et&#xa0;al., 2011</xref>), and X-ray computed tomography (<xref ref-type="bibr" rid="B152">Wu and Guo, 2014</xref>) facilitate detailed and accurate trait measurements. These environments allow for the isolation of individual plants and the creation of stable and consistent conditions, making it easier to achieve precision. For canopy architecture, this implies capturing detailed plant morphologies under consistent lighting. For root architecture, technologies such as transparent soil systems and hydroponics enable the study of root development with minimal interference. However, despite their advantages, CCP measurements may not fully replicate the complexities of natural environments, limiting the applicability of these findings to field conditions.</p>
<p>In contrast, FCP presents significant challenges for both canopy and root architecture measurements, but is crucial for understanding crop performance under real-world conditions. In field settings, environmental variability, soil heterogeneity, and plant interactions complicate the consistency and accuracy of measurements. Canopy phenotyping in the field benefits from the use of UAVs, quadruped robotics, and LiDAR, enabling large-scale data collection across entire crop fields. These technologies offer practical solutions for medium- to high-resolution canopy measurements in dynamic outdoor settings. For root phenotyping, advanced tools such as ground-penetrating radar (GPR) (<xref ref-type="bibr" rid="B82">Liu et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B85">Lombardi et&#xa0;al., 2021</xref>) and electrical resistivity tomography (ERT) (<xref ref-type="bibr" rid="B114">Peruzzo et&#xa0;al., 2020</xref>) are helping researchers overcome the difficulties of non-invasive root system analysis. These field-deployed technologies enable large-scale, high-resolution phenotyping without disrupting the plant&#x2013;soil system, offering more accurate reflections of natural root development.</p>
<p>In both canopy and root architecture studies, integrating advanced imaging technologies is critical for bridging the gap between precision in controlled environments and field scalability. Although field-based phenotyping poses greater technical challenges, it is indispensable for evaluating the actual performance of crop varieties under realistic growing conditions, and continued advancements in sensor technologies will further enhance our ability to capture these complex phenotypes.</p>
</sec>
</sec>
</sec>
<sec id="s5" sec-type="discussion">
<label>5</label>
<title>Discussions and future perspectives</title>
<sec id="s5_1">
<label>5.1</label>
<title>Overview of key findings</title>
<p>This review systematically compared 3D phenotyping technologies across Chamber-Crop Phenotyping (CCP) and Field-Crop Phenotyping (FCP) environments, revealing the fundamental trade-offs that shape technology selection for different research and breeding objectives. Additionally, this synthesis provides cross-cutting insights that can guide practitioners in selecting appropriate solutions for their specific phenotyping requirements.</p>
<p>The most consistent finding across all technologies was an inverse relationship between measurement precision and operational scalability. In CCP environments, technologies such as laser triangulation (LTS) and structured light (SL) achieve micrometer-level accuracy (14 &#xb5;m&#x2013;45 &#xb5;m for LTS; <xref ref-type="bibr" rid="B112">Paulus et&#xa0;al., 2014</xref>), enabling detailed organ-level measurements, including petal&#xa0;thickness, leaf surface geometry, and fine-scale growth dynamics of plants. However, these high-precision approaches are fundamentally limited to single-plant or small-batch applications because of their short operational range, sensitivity to environmental conditions, and time-consuming nature of data acquisition protocols.</p>
<p>Conversely, FCP-oriented deployments sacrifice fine-scale precision for field-relevant scalability. UAV-mounted MVS and mobile TLS systems can cover hectares within hours, achieving centimeter-level accuracy sufficient for canopy-level trait extraction (R&#xb2; = 0.78&#x2013;0.99 for height and biomass estimates; <xref ref-type="bibr" rid="B63">Kim et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B168">Zhu et&#xa0;al., 2021</xref>). This precision degradation, from micrometers to centimeters, reflects not only sensor limitations but also the compounding effects of environmental variability, platform instability, and increased measurement distances inherent to field operations.</p>
<p>A critical insight from this comparative analysis is that environmental robustness, rather than theoretical accuracy, often determines the practical suitability of a technology. Technologies that exhibit high performance in controlled settings may fail catastrophically under field conditions. Structured light sensors, for example, achieve excellent results in laboratory environments (&lt;13&#xa0;mm error; <xref ref-type="bibr" rid="B101">Nguyen et&#xa0;al., 2015</xref>), but are severely compromised by ambient sunlight, restricting their field deployment to dawn, dusk, or nighttime operations (<xref ref-type="bibr" rid="B120">Rosell-Polo et&#xa0;al., 2015</xref>). Similarly, light-field cameras demonstrate interesting capabilities for post-capture refocusing but suffer from a limited effective depth range (10 cm&#x2013;50 cm) and computational demands that preclude routine field applications (<xref ref-type="bibr" rid="B127">Schima et&#xa0;al., 2016</xref>).</p>
<p>In contrast, terrestrial laser scanning (TLS) and LiDAR-based approaches exhibit superior robustness across lighting conditions because of their active illumination and independence from ambient light. This environmental resilience, combined with mature processing algorithms and commercial availability, explains the growing adoption of TLS for field phenotyping, despite the higher equipment costs. The recent development of backpack-mounted LiDAR systems (<xref ref-type="bibr" rid="B168">Zhu et&#xa0;al., 2021</xref>) represents a significant advancement in bridging the gap between TLS precision and field-scale mobility.</p>
<p>Rather than viewing CCP and FCP as competing approaches, this review highlights their fundamentally complementary roles in phenotyping pipelines. CCP environments remain essential for (i) early-stage trait discovery and method development, where environmental control enables the isolation of specific treatment effects, (ii) high-precision validation of genotype&#x2013;phenotype associations requiring organ-level measurements, and (iii) algorithm training and sensor calibration prior to field deployment.</p>
<p>FCP provides irreplaceable value for (i) evaluating genotype-by-environment interactions under realistic growing conditions, (ii) capturing population-level variation across large breeding trials, and (iii) assessing traits that only manifest under field stresses, including wind, variable irrigation, and natural pest pressure. The poor correlation between controlled environment and field phenotypic data documented by <xref ref-type="bibr" rid="B116">Poorter et&#xa0;al. (2016)</xref>, a meta-analysis finding that forms a sobering backdrop for this review, underscores that neither approach can substitute for the other.</p>
<p>The synthesis of the current literature reveals an emerging trend toward multi-sensor integration and data fusion approaches. Combining TLS or LiDAR structural data with hyperspectral or thermal imaging enables the simultaneous capture of geometric and physiological traits, providing a more comprehensive phenotypic characterization than any single modality (<xref ref-type="bibr" rid="B25">Dilmurat et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B49">Huang et&#xa0;al., 2018</xref>). This multi-sensor paradigm addresses a key limitation of geometry-only phenotyping: the inability to directly assess plant physiological status using structural data alone.</p>
<p>Based on this comparative analysis, we offer the following guidance for technology selection: (i) for organ-level trait measurement requiring micrometer precision, laser triangulation or high-end MVS systems in controlled environments remain optimal; (ii) for plot-level field phenotyping emphasizing throughput and environmental robustness, TLS (including mobile and backpack configurations) or UAV-based MVS provide the best balance of accuracy and scalability; (iii) for real-time monitoring applications requiring high temporal frequency, ToF cameras offer advantages in acquisition speed despite lower spatial resolution; and (iv) for cost-constrained applications, low-cost MVS systems using consumer-grade cameras provide accessible entry points, although with increased processing requirements and reduced accuracy compared to active sensing alternatives.</p>
<p>The quantitative synthesis presented in <xref ref-type="table" rid="T1"><bold>Table&#xa0;1</bold></xref> provides a structured reference for these comparisons, enabling researchers to identify technologies that match their specific requirements for accuracy, throughput, platform compatibility, and environmental conditions.</p>
</sec>
<sec id="s5_2">
<label>5.2</label>
<title>Temporal resolution and throughput considerations</title>
<p>Beyond spatial accuracy and trait coverage, temporal resolution, the frequency at which measurements can be repeated, is a critical but often underappreciated technological characteristic. The ability to capture plant growth dynamics, diurnal patterns, and stress responses fundamentally depends on the measurement frequency, which varies substantially across different technologies and platforms (<xref ref-type="table" rid="T4"><bold>Table&#xa0;4</bold></xref>).</p>
<table-wrap id="T4" position="float">
<label>Table&#xa0;4</label>
<caption>
<p>Temporal characteristics of 3D phenotyping technologies.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Technology</th>
<th valign="middle" align="left">Single-plant acquisition</th>
<th valign="middle" align="left">Plot-level acquisition</th>
<th valign="middle" align="left">Field-scale throughput</th>
<th valign="middle" align="left">Limiting factor</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">LTS</td>
<td valign="middle" align="left">1 min&#x2013;10 min</td>
<td valign="middle" align="left">10 min&#x2013;30 min</td>
<td valign="middle" align="left">&lt;1 ha/day</td>
<td valign="middle" align="left">Scan time, repositioning</td>
</tr>
<tr>
<td valign="middle" align="center">MVS</td>
<td valign="middle" align="left">1 min&#x2013;5 min (multi-view)</td>
<td valign="middle" align="left">5 min&#x2013;15 min</td>
<td valign="middle" align="left">20 ha/day&#x2013;100 ha/day (UAV)</td>
<td valign="middle" align="left">Processing, flight time</td>
</tr>
<tr>
<td valign="middle" align="center">ToF</td>
<td valign="middle" align="left">Real-time</td>
<td valign="middle" align="left">Real-time + movement</td>
<td valign="middle" align="left">5 ha/day&#x2013;20 ha/day (robot)</td>
<td valign="middle" align="left">Platform speed</td>
</tr>
<tr>
<td valign="middle" align="center">TLS</td>
<td valign="middle" align="left">5 min&#x2013;30 min</td>
<td valign="middle" align="left">30 min&#x2013;60 min (multi-position)</td>
<td valign="middle" align="left">5 ha/day&#x2013;30 ha/day (mobile)</td>
<td valign="middle" align="left">Scan positions, data volume</td>
</tr>
<tr>
<td valign="middle" align="center">SL</td>
<td valign="middle" align="left">&lt;5 s</td>
<td valign="middle" align="left">1 min&#x2013;5 min</td>
<td valign="middle" align="left">N/A (practical in CCP only)</td>
<td valign="middle" align="left">Lighting conditions</td>
</tr>
<tr>
<td valign="middle" align="center">LF</td>
<td valign="middle" align="left">&lt;1 s</td>
<td valign="middle" align="left">1 min&#x2013;5 min</td>
<td valign="middle" align="left">N/A (experimental)</td>
<td valign="middle" align="left">Processing, limited range</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>Acquisition times assume standard sensor configurations and exclude data processing time. Field-scale throughput assumes appropriate platform (UAV for MVS, mobile platform for TLS). Real-time acquisition enables video-rate capture but generates substantial data volumes requiring downstream processing. Actual throughput varies with experimental design, crop characteristics, and operator experience.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>At one extreme, time-of-flight cameras and RGB video systems enable continuous real-time acquisition at 30&#x2013;60 frames per second, supporting the analysis of rapid plant movements, including leaf heliotropism, nyctinasty, and wind-induced motion (<xref ref-type="bibr" rid="B10">Biskup et&#xa0;al., 2007</xref>). Such a high temporal resolution is achievable only for stationary single-plant setups in CCP environments. At the other extreme, UAV-based photogrammetric surveys of large field trials may be conducted weekly or bi-weekly, constrained by flight planning, weather windows, and data processing capacity, rather than by fundamental sensor limitations.</p>
<p>Between these extremes, most phenotyping systems operate at intermediate temporal resolutions determined by the acquisition time, repositioning requirements, and processing throughput. Fixed gantry systems, such as the Field Scanalyzer, can achieve daily or twice-daily scans of experimental plots (<xref ref-type="bibr" rid="B146">Virlet et&#xa0;al., 2016</xref>), enabling the detection of growth rate differences and stress onset. Mobile ground platforms typically achieve plot revisit intervals of 2&#x2013;7 days for large breeding trials. The critical trade-off involves spatial resolution versus temporal frequency: systems optimized for detailed organ-level measurements generally sacrifice throughput, whereas high-throughput field systems sacrifice spatial detail.</p>
<p>For dynamic phenotyping applications, such as tracking growth rates, stress responses, or developmental transitions, temporal resolution may outweigh spatial resolution in importance. A 10-day measurement interval may entirely miss critical growth windows, whereas daily measurements at reduced spatial resolution can capture phenological differences essential for breeding selection. This trade-off should explicitly inform technology and platform selection based on the specific biological questions being addressed, with growth-rate-sensitive applications prioritizing temporal frequency and morphological characterization prioritizing spatial details.</p>
</sec>
<sec id="s5_3">
<label>5.3</label>
<title>Technological advancements</title>
<p>Over time, technological advancements have significantly enhanced the capabilities of 3D phenotyping in both CCP and FCP environments. In particular, the development of high-resolution 3D sensing technologies, such as LiDAR and SL systems, has enabled precise and comprehensive data collection across various plant traits. These sensors enable the generation of detailed 3D models of crops, capturing key features such as plant height, canopy volume, and biomass distribution, which are critical for evaluating crop performance and health.</p>
<p>In CCP, the integration of laser triangulation and SL systems has been particularly effective for close-range phenotyping (<xref ref-type="bibr" rid="B73">Lee et&#xa0;al., 2013</xref>; <xref ref-type="bibr" rid="B102">Nguyen et&#xa0;al., 2016b</xref>). These systems excel in controlled environments, offering high accuracy in capturing minute morphological changes. The articulated arm and fixed-post mounts commonly used in CCP setups further enhance precision by allowing the sensors to maintain consistent positioning and scanning parameters, enabling repeatable and reliable measurements.</p>
<p>Advancements in sensor carrier platforms, such as drones and wheeled robots, have revolutionized data collection in the field. Drone-mounted LiDAR and photogrammetry systems have enabled rapid large-scale data acquisition, providing high-resolution 3D maps of entire fields within minutes (<xref ref-type="bibr" rid="B36">Gano et&#xa0;al., 2024</xref>). Similarly, wheeled and treaded robots equipped with terrestrial laser scanners offer detailed ground-level 3D imaging, making it possible to capture lower canopy structures and root zone traits that are often missed by aerial platforms (<xref ref-type="bibr" rid="B51">Iqbal et&#xa0;al., 2020b</xref>). These platforms improve spatial coverage and allow real-time data collection, offering insights into crop growth dynamics.</p>
<p>One critical advancement that has emerged is the fusion of multi-sensor system data, where 3D imaging technologies are combined with other data modalities, such as hyperspectral or thermal imaging (<xref ref-type="bibr" rid="B25">Dilmurat et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B49">Huang et&#xa0;al., 2018</xref>). This data fusion enables a more holistic view of crop performance, allowing researchers to correlate 3D structural data with physiological traits, such as leaf chlorophyll content or water stress, leading to more accurate phenotyping. Additionally, improved algorithms for data processing, particularly in handling large datasets from field deployments, are helping to overcome issues of noise and occlusion, resulting in cleaner and more interpretable data (<xref ref-type="bibr" rid="B58">Kahraman and Bacher, 2021</xref>).</p>
<p>The adoption of AI-driven image analysis tools and machine learning algorithms has further refined the ability to extract meaningful insights from 3D phenotypic data sets. These technologies automate the classification of complex traits, reduce human error, and enable high-throughput phenotyping at previously unattainable scales (<xref ref-type="bibr" rid="B33">Feng et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B90">Lu et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B165">Zhang et&#xa0;al., 2021</xref>). As these tools continue to develop, their integration into both CCP and FCP systems will be critical for accelerating the speed and accuracy of crop trait evaluation.</p>
</sec>
<sec id="s5_4">
<label>5.4</label>
<title>Error sources and mitigation strategies</title>
<p>Understanding the sources of measurement errors in 3D phenotyping is essential for appropriate technology selection, experimental design, and result interpretation. Although error sources have been discussed throughout the technology-specific sections of this review, a unified taxonomy provides a framework for systematic comparisons and targeted mitigation. Following the categorization approach of <xref ref-type="bibr" rid="B43">Harandi et&#xa0;al. (2023)</xref> and <xref ref-type="bibr" rid="B110">Paulus (2019)</xref>, we classified the error sources into three primary categories: sensor-intrinsic, scene-related, and environment-related errors.</p>
<sec id="s5_4_1">
<label>5.4.1</label>
<title>Sensor-intrinsic errors</title>
<p>Sensor-intrinsic errors arise from the fundamental limitations of sensing hardware and associated signal processing. Spatial resolution limits: Each technology has characteristic resolution constraints, such as micrometer-level for laser triangulation, millimeter-level for structured light and high-end MVS, and centimeter-level for ToF and field-deployed TLS. These limits define the minimum feature size that can be reliably detected and directly constrain the traits accessible to each technology. Depth accuracy and noise: Depth measurements are subject to systematic biases and random noise that vary with distance, surface orientation, and material properties. ToF cameras exhibit characteristic &#x201c;flying pixel&#x201d; artifacts at depth discontinuities (<xref ref-type="bibr" rid="B61">Kazmi et&#xa0;al., 2014</xref>), whereas laser triangulation systems exhibit depth-dependent accuracy variations within their operational range (<xref ref-type="bibr" rid="B112">Paulus et&#xa0;al., 2014</xref>). Calibration errors: Multi-camera MVS systems and structured light projector-camera pairs require precise geometric calibration; residual calibration errors propagate through the reconstruction pipeline, causing systematic distortions that may exceed sensor noise in poorly calibrated systems.</p>
</sec>
<sec id="s5_4_2">
<label>5.4.2</label>
<title>Scene-related errors</title>
<p>Scene-related errors arise from interactions between the sensing modality and the characteristics of plant targets: Occlusion and incompleteness: Plant self-occlusion is ubiquitous, with leaves, stems, and reproductive structures blocking sensor views to underlying structures. This affects all technologies but is particularly problematic for single-viewpoint sensing applications. The severity of this phenomenon depends on the plant architecture, growth stage, and viewing geometry. Surface optical properties: Leaf reflectance varies with chlorophyll content, surface texture, and moisture status, affecting laser triangulation accuracy (<xref ref-type="bibr" rid="B27">Dupuis et&#xa0;al., 2015</xref>) and MVS feature-matching success. Specular (shiny) and translucent surfaces present particular challenges, potentially causing systematic depth errors or reconstruction failures. Texture and feature availability: Passive MVS techniques require detectable surface features for correspondence matching to be effective. Uniform, texture-less surfaces, which are common on young leaves, stems, and some fruits, can cause sparse or failed reconstruction in the affected regions. Plant motion during acquisition: Non-rigid plant motion during scanning violates the static scene assumptions underlying most reconstruction algorithms. Even subtle motion between frames can cause blurring, misalignment, or spurious geometries in the resulting point cloud.</p>
</sec>
<sec id="s5_4_3">
<label>5.4.3</label>
<title>Environment-related errors</title>
<p>Environment-related errors reflect the influence of external conditions on sensing performance. Ambient lighting: Structured light and ToF cameras are highly sensitive to ambient illumination, with direct sunlight capable of completely overwhelming projected patterns or modulated signals. Even passive MVS experiences reduced accuracy under variable lighting owing to feature matching inconsistencies (<xref ref-type="bibr" rid="B109">Paturkar et&#xa0;al., 2019</xref>). Wind effects: Wind-induced plant motion represents a primary environmental constraint for field phenotyping, affecting all technologies that require temporal integration (scanning systems) or multi-image acquisition (MVS). Severity scales with wind speed, plant flexibility, and acquisition duration. Atmospheric conditions: Dust, fog, rain, and humidity affect laser propagation and camera optics, introducing noise or complete sensing failure under adverse conditions. These effects are generally more severe for active sensing modalities that rely on laser return signals.</p>
<p><xref ref-type="table" rid="T5"><bold>Table&#xa0;5</bold></xref> provides a systematic mapping of error source severity across the six technologies reviewed, synthesizing the technology-specific discussions in <italic>Section 2</italic>. This framework can guide technology selection based on the anticipated operating conditions and inform the design of mitigation strategies, such as multi-viewpoint acquisition, environmental controls, or robust reconstruction algorithms.</p>
<table-wrap id="T5" position="float">
<label>Table&#xa0;5</label>
<caption>
<p>Error source severity by technology and environment.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Error category</th>
<th valign="middle" align="center">Error source</th>
<th valign="middle" align="center">LTS</th>
<th valign="middle" align="center">MVS</th>
<th valign="middle" align="center">ToF</th>
<th valign="middle" align="center">TLS</th>
<th valign="middle" align="center">SL</th>
<th valign="middle" align="center">LF</th>
<th valign="middle" align="center">CCP impact</th>
<th valign="middle" align="center">FCP impact</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" rowspan="3" align="center">Sensor-intrinsic</td>
<td valign="middle" align="left">Spatial resolution limits</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">High</td>
</tr>
<tr>
<td valign="middle" align="left">Depth noise/accuracy</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Med</td>
</tr>
<tr>
<td valign="middle" align="left">Calibration sensitivity</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
</tr>
<tr>
<td valign="middle" rowspan="4" align="center">Scene-related</td>
<td valign="middle" align="left">Occlusion/incompleteness</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">High</td>
</tr>
<tr>
<td valign="middle" align="left">Surface reflectance effects</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
</tr>
<tr>
<td valign="middle" align="left">Texture-less surfaces</td>
<td valign="middle" align="left">N/A</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">N/A</td>
<td valign="middle" align="left">N/A</td>
<td valign="middle" align="left">N/A</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
</tr>
<tr>
<td valign="middle" align="left">Plant motion blur</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">High</td>
</tr>
<tr>
<td valign="middle" rowspan="3" align="center">Environment-related</td>
<td valign="middle" align="left">Ambient light interference</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">High</td>
</tr>
<tr>
<td valign="middle" align="left">Wind-induced motion</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">High</td>
</tr>
<tr>
<td valign="middle" align="left">Atmospheric conditions</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Med</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Low</td>
<td valign="middle" align="left">Med</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>High, Major limitation, significant accuracy degradation</p></fn>
<fn>
<p>Med, Moderate impact, mitigation typically required</p></fn>
<fn>
<p>Low, Minor impact, generally manageable</p></fn>
<fn>
<p>N/A, Not applicable to this technology</p></fn>
<fn>
<p>CCP/FCP, Impact indicates typical severity in each environment</p></fn>
<fn>
<p>Severity ratings synthesize published validation studies and reflect typical operating conditions. Actual impact varies with specific sensor models, plant species, and experimental protocols. Technologies with active illumination (LTS, TLS) are generally less affected by ambient light but more affected by surface optical properties.</p></fn>
</table-wrap-foot>
</table-wrap>
</sec>
</sec>
<sec id="s5_5">
<label>5.5</label>
<title>Technology integration</title>
<p>Rapid advancements in imaging technologies, such as SL, LF cameras, TLS, ToF cameras, and multiview stereo, have individually contributed significantly to plant phenotyping. Each of these technologies, as discussed in the previous sections, offers unique capabilities that address different aspects of plant analysis, from capturing detailed 3D structures to measuring precise distances and reflecting plant morphology in diverse environments. However, the true potential of these technologies can be realized when they are combined, creating a synergistic approach that enhances the accuracy, resolution, and efficiency of phenotyping.</p>
<p>The integration of these phenotyping technologies can allow researchers to leverage the strengths of each approach and compensate for the limitations of the others. For instance, although ToF cameras provide accurate distance measurements and are effective in dynamic environments (<xref ref-type="bibr" rid="B61">Kazmi et&#xa0;al., 2014</xref>; <xref ref-type="bibr" rid="B134">Song et&#xa0;al., 2011</xref>), they might lack the fine details captured by LF cameras (<xref ref-type="bibr" rid="B115">Polder and Hofstee, 2014</xref>). Conversely, LF cameras excel in capturing intricate details and enabling post-capture refocusing, but they may not perform as well in large-scale field applications, where TLS provides broader coverage and robust 3D mapping (<xref ref-type="bibr" rid="B127">Schima et&#xa0;al., 2016</xref>).</p>
<p>By combining these technologies, researchers can establish a comprehensive phenotyping pipeline. For example, TLS can create large-scale, high-resolution 3D models of entire plant canopies or fields, capturing structural data at the macro level. This data can then be complemented by the fine-scale detail obtained from LF cameras, which can focus on individual plants or specific traits within the canopy. Additionally, the integration of ToF cameras allows for real-time data collection in dynamic environments, making it possible to monitor changes in plant phenotypes as they occur.</p>
<p>The combined use of these imaging technologies opens new possibilities for controlled indoor phenotyping and large-scale field studies. In controlled environments, such as greenhouses or growth chambers, the integration of SL with Multiview Stereo systems can facilitate a detailed analysis of plant structures, including leaf morphology, stem thickness, and flower development (<xref ref-type="bibr" rid="B100">Nguyen et&#xa0;al., 2016a</xref>). The combination of TLS and ToF depth cameras is particularly powerful in field applications. TLS can provide detailed 3D models of plant populations across entire fields, whereas ToF depth cameras can capture dynamic changes in plant growth and responses to environmental conditions over time (<xref ref-type="bibr" rid="B129">Shafiekhani et&#xa0;al., 2017</xref>). This integration allows for the monitoring of large-scale phenotypic traits, such as canopy height, biomass distribution, and spatial variability, within a crop field. Moreover, these combined datasets can be fed into machine learning models to predict yield, assess stress responses, and guide precision agriculture practices (<xref ref-type="bibr" rid="B129">Shafiekhani et&#xa0;al., 2017</xref>).</p>
<p>Although integrating these advanced imaging technologies offers significant benefits, it also presents challenges. One of the main challenges is the need for sophisticated data fusion techniques that combine datasets from different modalities into coherent and interpretable models. Differences in spatial resolution, data formats, and temporal scales must be reconciled to ensure an accurate and meaningful analysis. Additionally, the high volume of data generated by these combined technologies requires efficient processing and storage solutions, as well as robust algorithms for extracting relevant phenotypic information.</p>
</sec>
<sec id="s5_6">
<label>5.6</label>
<title>Remaining challenges: the scalability-accuracy trade-off and phenotyping prioritization</title>
<sec id="s5_6_1">
<label>5.6.1</label>
<title>The central trade-off</title>
<p>The analysis presented in this review converges on a fundamental tension that pervades 3D plant phenotyping: the inverse relationship between measurement precision and operational scalability. This scalability-accuracy trade-off is not merely a technical limitation but represents a core design constraint that shapes technology selection, experimental design, and the scope of phenotypic questions that can be addressed practically.</p>
<p>At one extreme, laboratory-based laser triangulation systems achieve micrometer-level precision suitable for detecting subtle morphological differences; however, their throughput is limited to individual plants scanned over several minutes. At the other extreme, UAV-based photogrammetry can survey hundreds of hectares daily, but the achievable precision degrades to centimeters, which is adequate for canopy-level traits but insufficient for organ-level trait characterization. Between these extremes lies a continuum of technology-platform combinations, each representing a specific position on the scalability-accuracy curve (<xref ref-type="fig" rid="f9"><bold>Figure&#xa0;9</bold></xref>). The critical insight is that no single system can simultaneously maximize both dimensions; rather, practitioners must select technologies that match their specific precision requirements and throughput constraints.</p>
<fig id="f9" position="float">
<label>Figure&#xa0;9</label>
<caption>
<p>Scalability-accuracy trade-off in 3D plant phenotyping, technology-platform combination across CCP and FCP environments.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1731852-g009.tif">
<alt-text content-type="machine-generated">Scatter plot showing spatial accuracy versus throughput/scalability for different technologies used in CCP (blue circles) and FCP (green squares) domains. The x-axis represents throughput in plants per day or square meters per hour, and the y-axis represents spatial accuracy in micrometers to centimeters. Key technologies include Laser Triangulation, Structured Light, Terrestrial LiDAR, Multiview Stereo, Time-of-Flight, and Light Field. The plot highlights mechanistic modeling, genetic analysis, yield prediction, and crop monitoring areas. A red dashed line indicates the trade-off frontier.</alt-text>
</graphic></fig>
</sec>
<sec id="s5_6_2">
<label>5.6.2</label>
<title>Objective-based technology selection framework</title>
<p>To operationalize this trade-off, we propose a decision framework that maps common phenotyping objectives to the recommended technology and platform combinations (<xref ref-type="table" rid="T6"><bold>Table&#xa0;6</bold></xref>). This framework recognizes that different research questions and stakeholder needs require different positions on the scalability-accuracy continuum.</p>
<table-wrap id="T6" position="float">
<label>Table&#xa0;6</label>
<caption>
<p>Objective-based technology selection framework for 3D phenotyping.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Objective</th>
<th valign="middle" align="center">Priority traits</th>
<th valign="middle" align="center">Precision required</th>
<th valign="middle" align="center">Throughput required</th>
<th valign="middle" align="center">Recommended technology</th>
<th valign="middle" align="center">Recommended platform</th>
<th valign="middle" align="center">Environment (CCP/FCP)</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">Genetic mapping/QTL analysis</td>
<td valign="middle" align="center">Height, biomass, flowering time, canopy cover</td>
<td valign="middle" align="center">cm-level</td>
<td valign="middle" align="center">High (&gt;500 plots)</td>
<td valign="middle" align="center">MVS, TLS</td>
<td valign="middle" align="center">UAV, mobile ground</td>
<td valign="middle" align="center">FCP</td>
</tr>
<tr>
<td valign="middle" align="center">Yield prediction</td>
<td valign="middle" align="center">Height, LAI, canopy volume, NDVI</td>
<td valign="middle" align="center">cm-level</td>
<td valign="middle" align="center">High (field-scale)</td>
<td valign="middle" align="center">MVS, TLS</td>
<td valign="middle" align="center">UAV</td>
<td valign="middle" align="center">FCP</td>
</tr>
<tr>
<td valign="middle" align="center">Growth rate monitoring</td>
<td valign="middle" align="center">Height change, leaf area expansion</td>
<td valign="middle" align="center">cm-level</td>
<td valign="middle" align="center">Medium-High (daily)</td>
<td valign="middle" align="center">TLS, MVS</td>
<td valign="middle" align="center">Ground robot, Fixed gantry, UAV</td>
<td valign="middle" align="center">CCP, FCP</td>
</tr>
<tr>
<td valign="middle" align="center">Stress detection (acute)</td>
<td valign="middle" align="center">Wilting, leaf angle, canopy temperature</td>
<td valign="middle" align="center">cm-level</td>
<td valign="middle" align="center">High (hourly-daily)</td>
<td valign="middle" align="center">ToF, MVS, TLS</td>
<td valign="middle" align="center">Fixed gantry, ground robot</td>
<td valign="middle" align="center">CCP preferred</td>
</tr>
<tr>
<td valign="middle" align="center">Stress detection (chronic)</td>
<td valign="middle" align="center">Height reduction, biomass, senescence</td>
<td valign="middle" align="center">cm-level</td>
<td valign="middle" align="center">Medium</td>
<td valign="middle" align="center">MVS, TLS</td>
<td valign="middle" align="center">UAV, mobile ground</td>
<td valign="middle" align="center">FCP</td>
</tr>
<tr>
<td valign="middle" align="center">Organ-level morphology</td>
<td valign="middle" align="center">Leaf dimensions, stem diameter, internode length</td>
<td valign="middle" align="center">mm-level</td>
<td valign="middle" align="center">Low (&lt;100 plants)</td>
<td valign="middle" align="center">LTS, SL, MVS</td>
<td valign="middle" align="center">Articulated arm, turntable</td>
<td valign="middle" align="center">CCP</td>
</tr>
<tr>
<td valign="middle" align="center">Mechanistic modeling</td>
<td valign="middle" align="center">Detailed architecture, organ geometry</td>
<td valign="middle" align="center">&#xb5;m&#x2013;mm level</td>
<td valign="middle" align="center">Very Low</td>
<td valign="middle" align="center">LTS, MVS (multi-view)</td>
<td valign="middle" align="center">Turntable, articulated arm</td>
<td valign="middle" align="center">CCP</td>
</tr>
<tr>
<td valign="middle" align="center">Lodging assessment</td>
<td valign="middle" align="center">Canopy angle, height heterogeneity</td>
<td valign="middle" align="center">cm-level</td>
<td valign="middle" align="center">High</td>
<td valign="middle" align="center">MVS, TLS</td>
<td valign="middle" align="center">UAV</td>
<td valign="middle" align="center">FCP</td>
</tr>
<tr>
<td valign="middle" align="center">Root phenotyping</td>
<td valign="middle" align="center">Root architecture, depth distribution</td>
<td valign="middle" align="center">mm-level</td>
<td valign="middle" align="center">Low</td>
<td valign="middle" align="center">CT, MRI (CCP); GPR (FCP)</td>
<td valign="middle" align="center">Specialized systems</td>
<td valign="middle" align="center">CCP, FCP</td>
</tr>
<tr>
<td valign="middle" align="center">Commercial crop monitoring</td>
<td valign="middle" align="center">Canopy health, uniformity, biomass</td>
<td valign="middle" align="center">cm-level</td>
<td valign="middle" align="center">Very High</td>
<td valign="middle" align="center">MVS</td>
<td valign="middle" align="center">UAV (consumer)</td>
<td valign="middle" align="center">FCP</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>Precision categories: &#xb5;m, micrometer (organ detail); mm, millimeter (organ-level); cm, centimeter (plant/canopy level). Throughput categories: Very Low (&lt;50 plants/day), Low (&lt;100), Medium (100&#x2013;500), High (&gt;500 plots/day), Very High (commercial field scale). Recommendations represent general guidance; optimal selection depends on specific experimental design, available resources, and local conditions.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>For genetic analysis and QTL mapping, where the goal is to detect phenotypic differences between genotypes, moderate precision is typically sufficient, as genetic effects manifest as population-level differences rather than individual-plant variation. UAV-based MVS or mobile TLS platforms provide adequate accuracy for most canopy-level traits while enabling the throughput necessary for statistically powered genetic studies (hundreds to thousands of plots).</p>
<p>For yield prediction and crop modeling, the emphasis shifts toward canopy-level traits (height, LAI, and biomass) that correlate with the final yield. UAV platforms excel in this regard, with daily or weekly acquisitions enabling time-series analyses that capture growth dynamics. The reduced precision of aerial systems is acceptable because yield prediction models typically operate at the plot or field scale.</p>
<p>For stress detection and response characterization, temporal resolution often outweighs spatial resolution. The ability to capture rapid physiological responses (e.g., wilting, leaf angle changes, and growth rate alterations) requires measurement frequencies that may only be achievable with fixed gantry systems or continuous-monitoring robotic platforms. Waiting for weekly UAV surveys may result in the omission of critical stress events.</p>
<p>For mechanistic modeling and physiological research, organ-level measurements (leaf dimensions, stem architecture, and reproductive structure counts) require high precision, which is achievable only in controlled environments. These applications accept low throughput as a necessary trade-off for the detailed measurements required to parameterize and validate physiological models.</p>
</sec>
<sec id="s5_6_3">
<label>5.6.3</label>
<title>Trait prioritization in field conditions</title>
<p>Beyond technology selection, the scalability-accuracy trade-off raises fundamental questions about which traits should be measured under field conditions. Not all traits measurable in controlled environments can or should be measured on the field scale. The decision of which traits to prioritize involves balancing the measurement feasibility with biological and practical relevance.</p>
<p>For crops with dense canopies (soybean, rice, and maize at late growth stages), individual plant measurements may be both technically challenging due to occlusion and practically unnecessary if plot-level traits capture the relevant variation. Canopy-level measurements&#x2014;height, cover, LAI, and biomass indices&#x2014;may provide equivalent predictive power for breeding selection while requiring dramatically less measurement effort.</p>
<p>Conversely, some traits that are difficult to measure directly can be inferred from more accessible measurements. Biomass, for example, can be estimated from height and canopy volume with sufficient accuracy for many applications, avoiding the need for destructive sampling. Similarly, stress responses may be detectable through canopy structural changes before they manifest as yield differences.</p>
</sec>
<sec id="s5_6_4">
<label>5.6.4</label>
<title>Stakeholder-specific considerations</title>
<p>Different stakeholders operate at different positions on the scalability-accuracy continuum based on their specific needs. For production-scale farmers, detailed organ-level measurements across entire fields are neither feasible nor necessary, and actionable information regarding crop health, growth uniformity, and stress occurrence can be derived from canopy-level observations. Sampling strategies and detailed measurements of representative plots or plants while surveying the broader field at a lower resolution may provide the most practical approach.</p>
<p>For plant breeders, priorities vary according to the experimental stage. Early generation selection in controlled environments may require detailed measurements to identify subtle trait differences, whereas advanced yield trials under field conditions may prioritize throughput to evaluate large numbers of lines across multiple environments. Understanding these stage-specific requirements is essential for designing phenotyping workflows that meet the needs of breeding programs.</p>
</sec>
<sec id="s5_6_5">
<label>5.6.5</label>
<title>Bridging technology and application</title>
<p>A persistent gap exists between technological development and agricultural applications. Engineers and data scientists developing phenotyping systems may not fully understand which traits are most relevant for breeding or agronomy, whereas breeders and agronomists may not appreciate the technical constraints that determine what is measurable at different scales. Addressing this gap requires sustained interdisciplinary collaboration to ensure that phenotyping tools are aligned with actual user needs, rather than technical capabilities alone.</p>
</sec>
<sec id="s5_6_6">
<label>5.6.6</label>
<title>Remaining technical challenges</title>
<p>In addition to the scalability-accuracy trade-off, significant technical challenges persist. Environmental factors, such as wind, rain, dust, and variable lighting, continue to degrade data quality under field conditions. Platform stability on uneven terrain affects sensor precision. The computational demands for processing terabytes of 3D data strain the available infrastructure. Although sensor costs are decreasing, they remain prohibitive for many potential users. Data interoperability between different sensor systems and analysis pipelines limits integration. The challenge of scaling from plot-level experiments to farm-level implementation remains largely unresolved. These challenges represent active areas of research and development, with advances in each area incrementally expanding the practical envelope of field-based, 3D phenotyping.</p>
</sec>
</sec>
<sec id="s5_7">
<label>5.7</label>
<title>Future perspectives in 3D field crop phenotyping</title>
<p>The future of 3D crop phenotyping is rapidly evolving, with emerging technologies that promise to address existing challenges and expand the potential of phenotyping systems. Soft robots and sensors represent a breakthrough in flexibility, adaptability, and safety in handling delicate plants. Unlike rigid robots, soft robots constructed from flexible materials can move fluidly through dense crops and gently interact with plants without causing damage (<xref ref-type="bibr" rid="B21">Del Dottore et&#xa0;al., 2024</xref>). For example, soft robotic arms can be equipped with soft sensors to measure traits such as leaf thickness, stem strength, and fruit ripeness, providing valuable phenotypic data without harming plants. These systems are particularly well-suited for environments where traditional robotic systems might struggle, such as in tight or uneven planting arrangements.</p>
<p>Simultaneously, the development of quadruped robots is transforming data collection in challenging field environments. Unlike wheeled or treaded robots, quadrupeds can navigate rugged terrains, such as hilly fields or areas with dense vegetation, where other robotic platforms experience mobility constraints. These four-legged robots offer stability, precision, and flexibility, allowing them to carry sensors into areas that are otherwise difficult to access (<xref ref-type="bibr" rid="B86">Lopes et&#xa0;al., 2023</xref>). Equipped with 3D imaging systems, quadruped robots can collect detailed data on plant architecture, leaf orientation, and canopy structures across varying terrains. This technology opens the door to more comprehensive data collection in real-world agricultural settings, particularly in locations where traditional wheeled robots cannot operate efficiently.</p>
<p>An exciting frontier is the synchronization of drones and ground robots for more coordinated and efficient phenotyping. Drone-ground robot synchronization allows for real-time collaboration between aerial and ground-based sensor platforms, combining the strengths of both systems (<xref ref-type="bibr" rid="B17">Chai et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B42">G&#xfc;ler and Y&#x131;ld&#x131;r&#x131;m, 2023</xref>). For example, drones could provide a high-level overview of the field, capturing large-scale 3D data on canopy structure and spatial variability, while ground robots can perform close-up measurements on individual plants, focusing on more detailed traits such as stem diameter, fruit size, or root exposure. By working in sync, these systems can collect multi-scale phenotypic data more efficiently, covering larger areas while maintaining the precision required for detailed trait analysis. The integration of real-time feedback loops between drones and ground robots also enhances the ability to optimize data collection strategies, dynamically adjusting sensor positioning or targeting specific areas of interest (<xref ref-type="bibr" rid="B17">Chai et&#xa0;al., 2024</xref>).</p>
<p>The emergence of digital twins for automated real-time 3D field plant phenotyping has added to this technological horizon. Digital twins, which are virtual replicas of physical systems, enable the real-time modeling and analysis of plant growth and behavior under various scenarios. Leveraging advanced 3D functional plant modeling frameworks (<xref ref-type="bibr" rid="B96">Mitsanis et&#xa0;al., 2024</xref>), these systems integrate phenotypic data, environmental conditions, and predictive models to dynamically simulate and monitor plant development. The applications of digital twins extend to understanding genotype-by-environment interactions, stress response prediction, and optimizing crop management strategies. Recent studies, such as those by <xref ref-type="bibr" rid="B83">Liu et&#xa0;al. (2024)</xref>, have demonstrated how functional-structural plant modeling can form the basis of digital twins by combining phenotypic traits and environmental data to enhance decision-making in crop breeding and precision agriculture. By offering a platform for continuous monitoring and virtual experimentation, digital twins are poised to bridge the gap between research and practical applications, making them a transformative tool in field-based phenotyping.</p>
<p>Another critical advancement is 3D spectral fusion, which goes beyond traditional geometry-based phenotyping by combining 3D structural data with spectral information obtained from hyperspectral and multispectral imaging. This fusion of data modalities allows researchers to capture both the morphological characteristics of plants (e.g., plant height and canopy shape) and their physiological status (e.g., nutrient levels, chlorophyll content, and water stress) (<xref ref-type="bibr" rid="B25">Dilmurat et&#xa0;al., 2022</xref>). By integrating geometry with spectral data, 3D spectral fusion provides a more holistic understanding of plant health and performance, enabling the identification of subtle stress indicators that may not be detectable using geometry alone. This approach has the potential to revolutionize the monitoring and management of crops, offering precise multidimensional insights into plant responses to environmental factors, diseases, and nutrient availability.</p>
<p>In addition to these technological advancements, edge computing is expected to play a transformative role in real-time data processing for field phenotyping. As the volume of data generated by 3D imaging systems grows exponentially, particularly in large-scale field applications, the ability to process data at the source rather than transferring raw data to the cloud will be critical. Edge computing allows localized data processing near the point of collection, enabling real-time analysis and reducing the overhead associated with transferring large datasets to the cloud for storage and analysis (<xref ref-type="bibr" rid="B138">Syu et&#xa0;al., 2023</xref>). This approach minimizes bandwidth usage, accelerates decision-making, and ensures that only relevant filtered data are sent to cloud systems for further processing.</p>
<p>The emergence of high-performance miniaturized hardware, such as edge computing devices produced by commercial entities (<xref ref-type="bibr" rid="B126">Scalcon et&#xa0;al., 2024</xref>), has made it feasible to perform complex computations at the edge. These low-power devices, equipped with AI-powered processors and GPU acceleration, can run machine learning models directly on drones, robots, or field stations, enabling real-time image analysis, trait detection, and anomaly identification (<xref ref-type="bibr" rid="B126">Scalcon et&#xa0;al., 2024</xref>). By processing data on-site, these systems can generate immediate insights into plant health, growth, and performance, which is especially useful for farmers or breeders who need to make quick, informed decisions about interventions such as watering, fertilization, or pest control.</p>
<p>Edge computing also addresses the scalability challenges associated with processing terabytes of 3D data across large areas. In situations where cloud connectivity may be limited, such as in remote or rural farming locations, edge devices can function independently, ensuring that data collection and processing continue without interruption. Additionally, edge computing offers enhanced data security because sensitive crop data can be processed and stored locally, reducing the risk of data breaches associated with cloud-based systems (<xref ref-type="bibr" rid="B138">Syu et&#xa0;al., 2023</xref>).</p>
<p>The combination of cloud platforms and edge computing provides a balanced solution for large-scale phenotyping. While cloud computing is essential for long-term storage, cross-field comparisons, and advanced analytics, edge computing optimizes on-the-fly processing and enables real-time action in the field. This hybrid approach ensures that phenotyping systems are efficient and practical for large-scale agricultural operations. Moreover, with emerging communication and processing architectures, such as edge learning for B5G (beyond-5G) networks with distributed signal processing demonstrated by <xref ref-type="bibr" rid="B156">Xu et&#xa0;al. (2023)</xref>, semantic communication, edge computing, and wireless sensing are now possible over geographically dispersed edge nodes while minimizing the need for frequent data exchange.</p>
<p>In addition to new technologies, collaborative efforts between engineers, plant breeders, and agronomists will be crucial for refining and implementing 3D phenotyping systems. Engineers and computer scientists will need to work closely with breeders and agronomists to ensure that the technologies developed are relevant to the needs of real-world crop management and breeding programs. Interdisciplinary research will also help address the knowledge gap between technology developers and end users, ensuring that innovations in phenotyping technology are grounded in practical, field-relevant applications. This collaboration can guide the prioritization of critical traits that should be measured in different crops and environments, ensuring that the developed systems are both effective and efficient.</p>
<p>To realize the full potential of these innovations, scalability and standardization are essential. As technologies such as soft robotics, drone-ground robot synchronization, 3D spectral fusion, and edge computing continue to evolve, they must be adapted for large-scale applications in commercial farming. This requires advancements in sensor miniaturization, power efficiency, and real-time data processing, allowing these systems to be deployed over vast areas without compromising data quality. Additionally, the development of standardized protocols for sensor calibration, data collection, and&#xa0;analysis is necessary to ensure the reproducibility and interoperability of phenotyping tools across different research groups and agricultural systems.</p>
</sec>
</sec>
<sec id="s6">
<label>6</label>
<title>Concluding remarks</title>
<p>This review explores significant advancements in 3D crop phenotyping technologies, emphasizing their roles in chamber crop phenotyping (CCP) and field crop phenotyping (FCP). While CCP offers precision and control in data collection, FCP provides the advantage of real-world applicability, addressing the complex environmental variability that crops face in actual agricultural settings. Together, these approaches form a complementary framework essential for high-throughput phenotyping and the development of resilient and high-performing crop varieties.</p>
<p>The introduction of advanced 3D sensing systems, such as TLS, LTS, SL, and ToF cameras, has greatly enhanced our ability to capture detailed morphological and physiological traits. Furthermore, the integration of multi-sensor platforms and spectral fusion techniques has allowed, and is expected to further allow, researchers to go beyond simple geometric measurements, offering a deeper understanding of plant health and performance than previously possible. When combined with AI-driven tools and machine learning algorithms, these technologies are pushing the boundaries of what can be achieved in automated phenotyping.</p>
<p>Despite these advancements, there are still significant challenges. Field phenotyping continues to grapple with environmental interference, sensor stability, and the sheer volume of data generated in large-scale applications. Addressing these challenges will require a combination of edge computing for localized, real-time data processing and cloud platforms for large-scale data storage and analysis. The emergence of high-performance miniaturized hardware by commercial entities will play a crucial role in ensuring that data processing becomes more efficient, scalable, and feasible for real-world applications in the future.</p>
<p>Looking forward, the continued development of autonomous robotic systems, soft sensors, and drone-ground synchronization will further enhance the precision, flexibility, and scalability of 3D phenotyping. As these technologies evolve, close collaboration between engineers, breeders, and agronomists will be vital to ensure that phenotyping tools are tailored to the practical needs of crop breeding and management programs in the future.</p>
<p>Ultimately, the future of 3D phenotyping lies in the ability to merge advanced technology with field applicability, enabling scalable, precise, and actionable insights into modern agriculture. As we overcome the current limitations and harness the potential of emerging innovations, 3D phenotyping will become a cornerstone of precision agriculture, driving sustainable improvements in crop yield, resilience, and food security.</p>
</sec>
</body>
<back>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>EO: Conceptualization, Visualization, Writing &#x2013; original draft. EP: Writing &#x2013; review &amp; editing. DS: Conceptualization, Writing &#x2013; review &amp; editing. RJ: Conceptualization, Writing &#x2013; review &amp; editing. BC: Conceptualization, Funding acquisition, Project administration, Supervision, Writing &#x2013; review &amp; editing.</p></sec>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p></sec>
<sec id="s10" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was not used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If&#xa0;you identify any issues, please contact us.</p></sec>
<sec id="s11" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors&#xa0;and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p></sec>
<ref-list>
<title>References</title>
<ref id="B1">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Alenya</surname> <given-names>G.</given-names></name>
<name><surname>Dellen</surname> <given-names>B.</given-names></name>
<name><surname>Torras</surname> <given-names>C.</given-names></name>
</person-group> (<year>2011</year>). &#x201c;
<article-title>3D modelling of leaves from color and ToF data for robotized plant measuring</article-title>,&#x201d; in <conf-name>2011 IEEE International Conference on Robotics and Automation</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>3408</fpage>&#x2013;<lpage>3414</lpage>.
</mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Araus</surname> <given-names>J. L.</given-names></name>
<name><surname>Cairns</surname> <given-names>J. E.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>Field high-throughput phenotyping: the new crop breeding frontier</article-title>. <source>Trends Plant Sci.</source> <volume>19</volume>, <fpage>52</fpage>&#x2013;<lpage>61</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.tplants.2013.09.008</pub-id>, PMID: <pub-id pub-id-type="pmid">24139902</pub-id>
</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Araus</surname> <given-names>J. L.</given-names></name>
<name><surname>Kefauver</surname> <given-names>S. C.</given-names></name>
<name><surname>Vergara-D&#xed;az</surname> <given-names>O.</given-names></name>
<name><surname>Gracia-Romero</surname> <given-names>A.</given-names></name>
<name><surname>Rezzouk</surname> <given-names>F. Z.</given-names></name>
<name><surname>Segarra</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>Crop phenotyping in a context of global change: What to measure and how to do it</article-title>. <source>J. Integr. Plant Biol.</source> <volume>64</volume>, <fpage>592</fpage>&#x2013;<lpage>618</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/jipb.13191</pub-id>, PMID: <pub-id pub-id-type="pmid">34807514</pub-id>
</mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Atefi</surname> <given-names>A.</given-names></name>
<name><surname>Ge</surname> <given-names>Y.</given-names></name>
<name><surname>Pitla</surname> <given-names>S.</given-names></name>
<name><surname>Schnable</surname> <given-names>J.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Robotic technologies for high-throughput plant phenotyping: Contemporary reviews and future perspectives</article-title>. <source>Front. Plant Sci.</source> <volume>12</volume>, <elocation-id>611940</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2021.611940</pub-id>, PMID: <pub-id pub-id-type="pmid">34249028</pub-id>
</mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bai</surname> <given-names>G.</given-names></name>
<name><surname>Ge</surname> <given-names>Y.</given-names></name>
<name><surname>Scoby</surname> <given-names>D.</given-names></name>
<name><surname>Leavitt</surname> <given-names>B.</given-names></name>
<name><surname>Stoerger</surname> <given-names>V.</given-names></name>
<name><surname>Kirchgessner</surname> <given-names>N.</given-names></name>
<etal/>
</person-group>. (<year>2019</year>). 
<article-title>NU-Spidercam: A large-scale, cable-driven, integrated sensing and robotic system for advanced phenotyping, remote sensing, and agronomic research</article-title>. <source>Comput. Electron. Agric.</source> <volume>160</volume>, <fpage>71</fpage>&#x2013;<lpage>81</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2019.03.009</pub-id>
</mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bao</surname> <given-names>Y.</given-names></name>
<name><surname>Tang</surname> <given-names>L.</given-names></name>
<name><surname>Breitzman</surname> <given-names>M. W.</given-names></name>
<name><surname>Salas Fernandez</surname> <given-names>M. G.</given-names></name>
<name><surname>Schnable</surname> <given-names>P. S.</given-names></name>
</person-group> (<year>2019</year>a). 
<article-title>Field-based robotic phenotyping of sorghum plant architecture using stereo vision</article-title>. <source>J. Field Robotics</source> <volume>36</volume>, <fpage>397</fpage>&#x2013;<lpage>415</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/rob.21830</pub-id>
</mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Bao</surname> <given-names>Y.</given-names></name>
<name><surname>Tang</surname> <given-names>L.</given-names></name>
<name><surname>Schnable</surname> <given-names>P. S.</given-names></name>
<name><surname>Fernandez</surname> <given-names>M. G. S.</given-names></name>
</person-group> (<year>2016</year>). &#x201c;
<article-title>Infield biomass sorghum yield component traits extraction pipeline using stereo vision</article-title>,&#x201d; in <conf-name>2016 ASABE Annual International Meeting</conf-name>, Vol. <volume>1</volume>. (<publisher-loc>St. Joseph, Michigan</publisher-loc>: 
<publisher-name>American Society of Agricultural and Biological Engineers (ASABE)</publisher-name>). 
</mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bao</surname> <given-names>Y.</given-names></name>
<name><surname>Tang</surname> <given-names>L.</given-names></name>
<name><surname>Srinivasan</surname> <given-names>S.</given-names></name>
<name><surname>Schnable</surname> <given-names>P. S.</given-names></name>
</person-group> (<year>2019</year>b). 
<article-title>Field-based architectural traits characterisation of maize plant using time-of-flight 3D imaging</article-title>. <source>Biosyst. Eng.</source> <volume>178</volume>, <fpage>86</fpage>&#x2013;<lpage>101</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2018.11.005</pub-id>
</mixed-citation>
</ref>
<ref id="B9">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>B&#xe9;land</surname> <given-names>M.</given-names></name>
<name><surname>Widlowski</surname> <given-names>J.-L.</given-names></name>
<name><surname>Fournier</surname> <given-names>R. A.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>A model for deriving voxel-level tree leaf area density estimates from ground-based LiDAR</article-title>. <source>Environ. Model. Software</source> <volume>51</volume>, <fpage>184</fpage>&#x2013;<lpage>189</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.envsoft.2013.09.034</pub-id>
</mixed-citation>
</ref>
<ref id="B10">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Biskup</surname> <given-names>B.</given-names></name>
<name><surname>Scharr</surname> <given-names>H.</given-names></name>
<name><surname>Schurr</surname> <given-names>U.</given-names></name>
<name><surname>Rascher</surname> <given-names>U. W. E.</given-names></name>
</person-group> (<year>2007</year>). 
<article-title>A stereo imaging system for measuring structural parameters of plant canopies</article-title>. <source>Plant Cell Environ.</source> <volume>30</volume>, <fpage>1299</fpage>&#x2013;<lpage>1308</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/j.1365-3040.2007.01702.x</pub-id>, PMID: <pub-id pub-id-type="pmid">17727419</pub-id>
</mixed-citation>
</ref>
<ref id="B11">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Broxton</surname> <given-names>M.</given-names></name>
<name><surname>Flynn</surname> <given-names>J.</given-names></name>
<name><surname>Overbeck</surname> <given-names>R.</given-names></name>
<name><surname>Erickson</surname> <given-names>D.</given-names></name>
<name><surname>Hedman</surname> <given-names>P.</given-names></name>
<name><surname>Duvall</surname> <given-names>M.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>Immersive light field video with a layered mesh representation</article-title>. <source>ACM Trans. Graphics (TOG)</source> <volume>39</volume>, <fpage>81</fpage>&#x2013;<lpage>86</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1145/3386569.3392485</pub-id>
</mixed-citation>
</ref>
<ref id="B12">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bucksch</surname> <given-names>A.</given-names></name>
<name><surname>Atta-Boateng</surname> <given-names>A.</given-names></name>
<name><surname>Azihou</surname> <given-names>A. F.</given-names></name>
<name><surname>Battogtokh</surname> <given-names>D.</given-names></name>
<name><surname>Baumgartner</surname> <given-names>A.</given-names></name>
<name><surname>Binder</surname> <given-names>B. M.</given-names></name>
<etal/>
</person-group>. (<year>2017</year>). 
<article-title>Morphological plant modeling: unleashing geometric and topological potential within the plant sciences</article-title>. <source>Front. Plant Sci.</source> <volume>8</volume>, <elocation-id>900</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2017.00900</pub-id>, PMID: <pub-id pub-id-type="pmid">28659934</pub-id>
</mixed-citation>
</ref>
<ref id="B13">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Busemeyer</surname> <given-names>L.</given-names></name>
<name><surname>Klose</surname> <given-names>R.</given-names></name>
<name><surname>Linz</surname> <given-names>A.</given-names></name>
<name><surname>Thiel</surname> <given-names>M.</given-names></name>
<name><surname>Wunder</surname> <given-names>E.</given-names></name>
<name><surname>Ruckelshausen</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2010</year>). &#x201c;
<article-title>Agro-sensor systems for outdoor plant phenotyping platforms in low and high density crop field plots</article-title>,&#x201d; in <conf-name>Proceedings 68th International Conference Agricultural Engineering</conf-name>. (<publisher-loc>D&#xfc;sseldorf, Germany</publisher-loc>: 
<publisher-name>VDI Verlag</publisher-name>) <fpage>213</fpage>&#x2013;<lpage>218</lpage>.
</mixed-citation>
</ref>
<ref id="B14">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Busemeyer</surname> <given-names>L.</given-names></name>
<name><surname>Mentrup</surname> <given-names>D.</given-names></name>
<name><surname>M&#xf6;ller</surname> <given-names>K.</given-names></name>
<name><surname>Wunder</surname> <given-names>E.</given-names></name>
<name><surname>Alheit</surname> <given-names>K.</given-names></name>
<name><surname>Hahn</surname> <given-names>V.</given-names></name>
<etal/>
</person-group>. (<year>2013</year>). 
<article-title>BreedVision&#x2014;A multi-sensor platform for non-destructive field-based phenotyping in plant breeding</article-title>. <source>Sensors</source> <volume>13</volume>, <fpage>2830</fpage>&#x2013;<lpage>2847</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s130302830</pub-id>, PMID: <pub-id pub-id-type="pmid">23447014</pub-id>
</mixed-citation>
</ref>
<ref id="B15">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Cai</surname> <given-names>Z.</given-names></name>
<name><surname>Jin</surname> <given-names>C.</given-names></name>
<name><surname>Xu</surname> <given-names>J.</given-names></name>
<name><surname>Yang</surname> <given-names>T.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Measurement of potato volume with laser triangulation and three-dimensional reconstruction</article-title>. <source>IEEE Access</source> <volume>8</volume>, <fpage>176565</fpage>&#x2013;<lpage>176574</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2020.3027154</pub-id>
</mixed-citation>
</ref>
<ref id="B16">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Cao</surname> <given-names>T.</given-names></name>
<name><surname>Panjvani</surname> <given-names>K.</given-names></name>
<name><surname>Dinh</surname> <given-names>A.</given-names></name>
<name><surname>Wahid</surname> <given-names>K.</given-names></name>
<name><surname>Bhowmik</surname> <given-names>P.</given-names></name>
</person-group> (<year>2017</year>). &#x201c;
<article-title>An approach to detect branches and seedpods based on 3D image in low-cost plant phenotyping platform</article-title>,&#x201d; in <conf-name>2017 IEEE 30th Canadian Conference on Electrical and Computer Engineering (CCECE)</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>1</fpage>&#x2013;<lpage>4</lpage>.
</mixed-citation>
</ref>
<ref id="B17">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chai</surname> <given-names>R.</given-names></name>
<name><surname>Guo</surname> <given-names>Y.</given-names></name>
<name><surname>Zuo</surname> <given-names>Z.</given-names></name>
<name><surname>Chen</surname> <given-names>K.</given-names></name>
<name><surname>Shin</surname> <given-names>H.-S.</given-names></name>
<name><surname>Tsourdos</surname> <given-names>A.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Cooperative motion planning and control for aerial-ground autonomous systems: Methods and applications</article-title>. <source>Prog. Aerospace Sci.</source> <volume>146</volume>, <fpage>101005</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.paerosci.2024.101005</pub-id>
</mixed-citation>
</ref>
<ref id="B18">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Colombo</surname> <given-names>L.</given-names></name>
<name><surname>Marana</surname> <given-names>B.</given-names></name>
</person-group> (<year>2010</year>). 
<article-title>Terrestrial laser scanning</article-title>. <source>GIM Int.</source> <volume>24</volume>, <fpage>17</fpage>&#x2013;<lpage>20</lpage>.
</mixed-citation>
</ref>
<ref id="B19">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dahiya</surname> <given-names>S.</given-names></name>
<name><surname>Kumar</surname> <given-names>S.</given-names></name>
<name><surname>Chaudhary</surname> <given-names>C.</given-names></name>
<name><surname>Chaudhary</surname> <given-names>C.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Lodging: Significance and preventive measures for increasing crop production</article-title>. <source>Int. J. Chem. Stud.</source> <volume>6</volume>, <fpage>700</fpage>&#x2013;<lpage>705</lpage>.
</mixed-citation>
</ref>
<ref id="B20">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Deery</surname> <given-names>D. M.</given-names></name>
<name><surname>Rebetzke</surname> <given-names>G. J.</given-names></name>
<name><surname>Jimenez-Berni</surname> <given-names>J. A.</given-names></name>
<name><surname>Condon</surname> <given-names>A. G.</given-names></name>
<name><surname>Smith</surname> <given-names>D. J.</given-names></name>
<name><surname>Bechaz</surname> <given-names>K. M.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>Ground-based LiDAR improves phenotypic repeatability of above-ground biomass and crop growth rate in wheat</article-title>. <source>Plant Phenomics</source> <volume>2020</volume>, <fpage>8329798</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.34133/2020/8329798</pub-id>, PMID: <pub-id pub-id-type="pmid">33313565</pub-id>
</mixed-citation>
</ref>
<ref id="B21">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Del Dottore</surname> <given-names>E.</given-names></name>
<name><surname>Mondini</surname> <given-names>A.</given-names></name>
<name><surname>Rowe</surname> <given-names>N.</given-names></name>
<name><surname>Mazzolai</surname> <given-names>B.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>A growing soft robot with climbing plant&#x2013;inspired adaptive behaviors for navigation in unstructured environments</article-title>. <source>Sci. Robotics</source> <volume>9</volume>, <elocation-id>eadi5908</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1126/scirobotics.adi5908</pub-id>, PMID: <pub-id pub-id-type="pmid">38232147</pub-id>
</mixed-citation>
</ref>
<ref id="B22">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dengyu</surname> <given-names>X.</given-names></name>
<name><surname>Liang</surname> <given-names>G.</given-names></name>
<name><surname>Chengliang</surname> <given-names>L.</given-names></name>
<name><surname>Yixiang</surname> <given-names>H.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Phenotype-based robotic screening platform for leafy plant breeding</article-title>. <source>IFAC-PapersOnLine</source> <volume>49</volume>, <fpage>237</fpage>&#x2013;<lpage>241</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ifacol.2016.10.044</pub-id>
</mixed-citation>
</ref>
<ref id="B23">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Dhami</surname> <given-names>H.</given-names></name>
<name><surname>Yu</surname> <given-names>K.</given-names></name>
<name><surname>Xu</surname> <given-names>T.</given-names></name>
<name><surname>Zhu</surname> <given-names>Q.</given-names></name>
<name><surname>Dhakal</surname> <given-names>K.</given-names></name>
<name><surname>Friel</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). &#x201c;
<article-title>Crop height and plot estimation for phenotyping from unmanned aerial vehicles using 3D LiDAR</article-title>,&#x201d; in <conf-name>2020 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>2643</fpage>&#x2013;<lpage>2649</lpage>.
</mixed-citation>
</ref>
<ref id="B24">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Di Gennaro</surname> <given-names>S. F.</given-names></name>
<name><surname>Matese</surname> <given-names>A.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Evaluation of novel precision viticulture tool for canopy biomass estimation and missing plant detection based on 2.5 D and 3D approaches using RGB images acquired by UAV platform</article-title>. <source>Plant Methods</source> <volume>16</volume>, <fpage>91</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13007-020-00632-2</pub-id>, PMID: <pub-id pub-id-type="pmid">32636922</pub-id>
</mixed-citation>
</ref>
<ref id="B25">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dilmurat</surname> <given-names>K.</given-names></name>
<name><surname>Sagan</surname> <given-names>V.</given-names></name>
<name><surname>Moose</surname> <given-names>S.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>AI-driven maize yield forecasting using unmanned aerial vehicle-based hyperspectral and lidar data fusion</article-title>. <source>ISPRS Ann. Photogrammetry Remote Sens. Spatial Inf. Sci.</source> <volume>3</volume>, <fpage>193</fpage>&#x2013;<lpage>199</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/isprs-annals-v-3-2022-193-2022</pub-id>
</mixed-citation>
</ref>
<ref id="B26">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dupuis</surname> <given-names>J.</given-names></name>
<name><surname>Kuhlmann</surname> <given-names>H.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>High-precision surface inspection: Uncertainty evaluation within an accuracy range of 15&#x3bc;m with triangulation-based laser line scanners</article-title>. <source>J. Appl. Geodesy</source> <volume>8</volume>, <fpage>109</fpage>&#x2013;<lpage>118</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1515/jag-2014-0001</pub-id>
</mixed-citation>
</ref>
<ref id="B27">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dupuis</surname> <given-names>J.</given-names></name>
<name><surname>Paulus</surname> <given-names>S.</given-names></name>
<name><surname>Mahlein</surname> <given-names>A.-K.</given-names></name>
<name><surname>Eichert</surname> <given-names>T.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>The impact of different leaf surface tissues on active 3D laser triangulation measurements</article-title>. <source>Photogrammetrie-Fernerkundung-Geoinformation</source>, <fpage>437</fpage>&#x2013;<lpage>447</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1127/pfg/2015/0280</pub-id>
</mixed-citation>
</ref>
<ref id="B28">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ehlert</surname> <given-names>D.</given-names></name>
<name><surname>Horn</surname> <given-names>H.-J.</given-names></name>
<name><surname>Adamek</surname> <given-names>R.</given-names></name>
</person-group> (<year>2008</year>). 
<article-title>Measuring crop biomass density by laser triangulation</article-title>. <source>Comput. Electron. Agric.</source> <volume>61</volume>, <fpage>117</fpage>&#x2013;<lpage>125</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2007.09.013</pub-id>
</mixed-citation>
</ref>
<ref id="B29">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Evans</surname> <given-names>J. R.</given-names></name>
</person-group> (<year>2013</year>). 
<article-title>Improving photosynthesis</article-title>. <source>Plant Physiol.</source> <volume>162</volume>, <fpage>1780</fpage>&#x2013;<lpage>1793</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1104/pp.113.219006</pub-id>, PMID: <pub-id pub-id-type="pmid">23812345</pub-id>
</mixed-citation>
</ref>
<ref id="B30">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Fageria</surname> <given-names>N. K.</given-names></name>
<name><surname>Baligar</surname> <given-names>V. C.</given-names></name>
<name><surname>Clark</surname> <given-names>R.</given-names></name>
</person-group> (<year>2006</year>). <source>Physiology of crop production.</source> (<publisher-loc>Binghamton, NY, USA</publisher-loc>: 
<publisher-name>Food Products Press</publisher-name>).
</mixed-citation>
</ref>
<ref id="B31">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Fan</surname> <given-names>Z.</given-names></name>
<name><surname>Sun</surname> <given-names>N.</given-names></name>
<name><surname>Qiu</surname> <given-names>Q.</given-names></name>
<name><surname>Li</surname> <given-names>T.</given-names></name>
<name><surname>Feng</surname> <given-names>Q.</given-names></name>
<name><surname>Zhao</surname> <given-names>C.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title><italic>In situ</italic> measuring stem diameters of maize crops with a high-throughput phenotyping robot</article-title>. <source>Remote Sens.</source> <volume>14</volume>, <fpage>1030</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs14041030</pub-id>
</mixed-citation>
</ref>
<ref id="B32">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Farhan</surname> <given-names>S. M.</given-names></name>
<name><surname>Yin</surname> <given-names>J.</given-names></name>
<name><surname>Chen</surname> <given-names>Z.</given-names></name>
<name><surname>Memon</surname> <given-names>M. S.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>A comprehensive review of LiDAR applications in crop management for precision agriculture</article-title>. <source>Sensors (Basel, Switzerland)</source> <volume>24</volume>, <fpage>5409</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s24165409</pub-id>, PMID: <pub-id pub-id-type="pmid">39205103</pub-id>
</mixed-citation>
</ref>
<ref id="B33">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Feng</surname> <given-names>Y.</given-names></name>
<name><surname>Zhu</surname> <given-names>J.</given-names></name>
<name><surname>Song</surname> <given-names>R.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>S2EFT: Spectral-spatial-elevation fusion transformer for hyperspectral image and LiDAR classification</article-title>. <source>Knowledge-Based Syst.</source> <volume>283</volume>, <fpage>111190</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.knosys.2023.111190</pub-id>
</mixed-citation>
</ref>
<ref id="B34">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Friedli</surname> <given-names>M.</given-names></name>
<name><surname>Kirchgessner</surname> <given-names>N.</given-names></name>
<name><surname>Grieder</surname> <given-names>C.</given-names></name>
<name><surname>Liebisch</surname> <given-names>F.</given-names></name>
<name><surname>Mannale</surname> <given-names>M.</given-names></name>
<name><surname>Walter</surname> <given-names>A.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Terrestrial 3D laser scanning to track the increase in canopy height of both monocot and dicot crop species under field conditions</article-title>. <source>Plant Methods</source> <volume>12</volume>, <fpage>1</fpage>&#x2013;<lpage>15</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13007-016-0109-7</pub-id>, PMID: <pub-id pub-id-type="pmid">26834822</pub-id>
</mixed-citation>
</ref>
<ref id="B35">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Furukawa</surname> <given-names>Y.</given-names></name>
<name><surname>Ponce</surname> <given-names>J.</given-names></name>
</person-group> (<year>2009</year>). 
<article-title>Accurate, dense, and robust multiview stereopsis</article-title>. <source>IEEE Trans. Pattern Anal. Mach. Intell.</source> <volume>32</volume>, <fpage>1362</fpage>&#x2013;<lpage>1376</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TPAMI.2009.161</pub-id>, PMID: <pub-id pub-id-type="pmid">20558871</pub-id>
</mixed-citation>
</ref>
<ref id="B36">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gano</surname> <given-names>B.</given-names></name>
<name><surname>Bhadra</surname> <given-names>S.</given-names></name>
<name><surname>Vilbig</surname> <given-names>J. M.</given-names></name>
<name><surname>Ahmed</surname> <given-names>N.</given-names></name>
<name><surname>Sagan</surname> <given-names>V.</given-names></name>
<name><surname>Shakoor</surname> <given-names>N.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Drone-based imaging sensors, techniques, and applications in plant phenotyping for crop breeding: A comprehensive review</article-title>. <source>Plant Phenome J.</source> <volume>7</volume>, <elocation-id>e20100</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/ppj2.20100</pub-id>
</mixed-citation>
</ref>
<ref id="B37">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gao</surname> <given-names>Y.</given-names></name>
<name><surname>Li</surname> <given-names>Z.</given-names></name>
<name><surname>Li</surname> <given-names>B.</given-names></name>
<name><surname>Zhang</surname> <given-names>L.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Extraction of corn plant phenotypic parameters with keypoint detection and stereo images</article-title>. <source>Agronomy</source> <volume>14</volume>, <fpage>1110</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14061110</pub-id>
</mixed-citation>
</ref>
<ref id="B38">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ga&#x161;parovi&#x107;</surname> <given-names>M.</given-names></name>
<name><surname>Jurjevi&#x107;</surname> <given-names>L.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>Gimbal influence on the stability of exterior orientation parameters of UAV acquired images</article-title>. <source>Sensors</source> <volume>17</volume>, <fpage>401</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s17020401</pub-id>, PMID: <pub-id pub-id-type="pmid">28218699</pub-id>
</mixed-citation>
</ref>
<ref id="B39">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gibbs</surname> <given-names>J. A.</given-names></name>
<name><surname>Pound</surname> <given-names>M. P.</given-names></name>
<name><surname>French</surname> <given-names>A. P.</given-names></name>
<name><surname>Wells</surname> <given-names>D. M.</given-names></name>
<name><surname>Murchie</surname> <given-names>E. H.</given-names></name>
<name><surname>Pridmore</surname> <given-names>T. P.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Active vision and surface reconstruction for 3D plant shoot modelling</article-title>. <source>IEEE/ACM Trans. Comput. Biol. Bioinf.</source> <volume>17</volume>, <fpage>1907</fpage>&#x2013;<lpage>1917</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TCBB.2019.2900768</pub-id>
</mixed-citation>
</ref>
<ref id="B40">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Gokturk</surname> <given-names>S. B.</given-names></name>
<name><surname>Yalcin</surname> <given-names>H.</given-names></name>
<name><surname>Bamji</surname> <given-names>C.</given-names></name>
</person-group> (<year>2004</year>). &#x201c;
<article-title>A time-of-flight depth sensor-system description, issues and solutions</article-title>,&#x201d; in <conf-name>2004 Conference on Computer Vision and Pattern Recognition Workshop</conf-name>, Vol. <volume>35</volume>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE Computer Society</publisher-name>). 
</mixed-citation>
</ref>
<ref id="B41">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gorte</surname> <given-names>B.</given-names></name>
<name><surname>Pfeifer</surname> <given-names>N.</given-names></name>
</person-group> (<year>2004</year>). 
<article-title>Structuring laser-scanned trees using 3D mathematical morphology</article-title>. <source>Int. Arch. Photogrammetry Remote Sens.</source> <volume>35</volume>, <fpage>929</fpage>&#x2013;<lpage>933</lpage>.
</mixed-citation>
</ref>
<ref id="B42">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>G&#xfc;ler</surname> <given-names>S.</given-names></name>
<name><surname>Y&#x131;ld&#x131;r&#x131;m</surname> <given-names>&#x130;. E.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>A distributed relative localization approach for air-ground robot formations with onboard sensing</article-title>. <source>Control Eng. Pract.</source> <volume>135</volume>, <fpage>105492</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.conengprac.2023.105492</pub-id>
</mixed-citation>
</ref>
<ref id="B43">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Harandi</surname> <given-names>N.</given-names></name>
<name><surname>Vandenberghe</surname> <given-names>B.</given-names></name>
<name><surname>Vankerschaver</surname> <given-names>J.</given-names></name>
<name><surname>Depuydt</surname> <given-names>S.</given-names></name>
<name><surname>Van Messem</surname> <given-names>A.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>How to make sense of 3D representations for plant phenotyping: a compendium of processing and analysis techniques</article-title>. <source>Plant Methods</source> <volume>19</volume>, <fpage>60</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13007-023-01031-z</pub-id>, PMID: <pub-id pub-id-type="pmid">37353846</pub-id>
</mixed-citation>
</ref>
<ref id="B44">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>ten Harkel</surname> <given-names>J.</given-names></name>
<name><surname>Bartholomeus</surname> <given-names>H.</given-names></name>
<name><surname>Kooistra</surname> <given-names>L.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Biomass and crop height estimation of different crops using UAV-based LiDAR</article-title>. <source>Remote Sens.</source> <volume>12</volume>, <fpage>17</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs12010017</pub-id>
</mixed-citation>
</ref>
<ref id="B45">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Hieda</surname> <given-names>N.</given-names></name>
</person-group> (<year>2015</year>). <source>Digital video projection for interactive entertainment</source> (<publisher-loc>Canada</publisher-loc>: 
<publisher-name>McGill University</publisher-name>).
</mixed-citation>
</ref>
<ref id="B46">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Holman</surname> <given-names>F. H.</given-names></name>
<name><surname>Riche</surname> <given-names>A. B.</given-names></name>
<name><surname>Michalski</surname> <given-names>A.</given-names></name>
<name><surname>Castle</surname> <given-names>M.</given-names></name>
<name><surname>Wooster</surname> <given-names>M. J.</given-names></name>
<name><surname>Hawkesford</surname> <given-names>M. J.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>High throughput field phenotyping of wheat plant height and growth rate in field plot trials using UAV based remote sensing</article-title>. <source>Remote Sens.</source> <volume>8</volume>, <fpage>1031</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs8121031</pub-id>
</mixed-citation>
</ref>
<ref id="B47">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hosoi</surname> <given-names>F.</given-names></name>
<name><surname>Omasa</surname> <given-names>K.</given-names></name>
</person-group> (<year>2009</year>). 
<article-title>Detecting seasonal change of broad-leaved woody canopy leaf area density profile using 3D portable LIDAR imaging</article-title>. <source>Funct. Plant Biol.</source> <volume>36</volume>, <fpage>998</fpage>&#x2013;<lpage>1005</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1071/FP09113</pub-id>, PMID: <pub-id pub-id-type="pmid">32688711</pub-id>
</mixed-citation>
</ref>
<ref id="B48">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hu</surname> <given-names>C.</given-names></name>
<name><surname>Li</surname> <given-names>P.</given-names></name>
<name><surname>Pan</surname> <given-names>Z.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Phenotyping of poplar seedling leaves based on a 3D visualization method</article-title>. <source>Int. J. Agric. Biol. Eng.</source> <volume>11</volume>, <fpage>145</fpage>&#x2013;<lpage>151</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.25165/j.ijabe.20181106.4110</pub-id>
</mixed-citation>
</ref>
<ref id="B49">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Huang</surname> <given-names>P.</given-names></name>
<name><surname>Luo</surname> <given-names>X.</given-names></name>
<name><surname>Jin</surname> <given-names>J.</given-names></name>
<name><surname>Wang</surname> <given-names>L.</given-names></name>
<name><surname>Zhang</surname> <given-names>L.</given-names></name>
<name><surname>Liu</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2018</year>). 
<article-title>Improving high-throughput phenotyping using fusion of close-range hyperspectral camera and low-cost depth sensor</article-title>. <source>Sensors</source> <volume>18</volume>, <fpage>2711</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s18082711</pub-id>, PMID: <pub-id pub-id-type="pmid">30126148</pub-id>
</mixed-citation>
</ref>
<ref id="B50">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Iqbal</surname> <given-names>J.</given-names></name>
<name><surname>Xu</surname> <given-names>R.</given-names></name>
<name><surname>Halloran</surname> <given-names>H.</given-names></name>
<name><surname>Li</surname> <given-names>C.</given-names></name>
</person-group> (<year>2020</year>a). 
<article-title>Development of a multi-purpose autonomous differential drive mobile robot for plant phenotyping and soil sensing</article-title>. <source>Electronics</source> <volume>9</volume>, <fpage>1550</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/electronics9091550</pub-id>
</mixed-citation>
</ref>
<ref id="B51">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Iqbal</surname> <given-names>J.</given-names></name>
<name><surname>Xu</surname> <given-names>R.</given-names></name>
<name><surname>Sun</surname> <given-names>S.</given-names></name>
<name><surname>Li</surname> <given-names>C.</given-names></name>
</person-group> (<year>2020</year>b). 
<article-title>Simulation of an autonomous mobile robot for LiDAR-based in-field phenotyping and navigation</article-title>. <source>Robotics</source> <volume>9</volume>, <fpage>46</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/robotics9020046</pub-id>
</mixed-citation>
</ref>
<ref id="B52">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ivanov</surname> <given-names>N.</given-names></name>
<name><surname>Boissard</surname> <given-names>P.</given-names></name>
<name><surname>Chapron</surname> <given-names>M.</given-names></name>
<name><surname>Andrieu</surname> <given-names>B.</given-names></name>
</person-group> (<year>1995</year>). 
<article-title>Computer stereo plotting for 3-D reconstruction of a maize canopy</article-title>. <source>Agric. For. Meteorology</source> <volume>75</volume>, <fpage>85</fpage>&#x2013;<lpage>102</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/0168-1923(94)02204-W</pub-id>
</mixed-citation>
</ref>
<ref id="B53">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jancosek</surname> <given-names>M.</given-names></name>
<name><surname>Pajdla</surname> <given-names>T.</given-names></name>
</person-group> (<year>2011</year>). 
<article-title>Multi-view reconstruction preserving weakly-supported surfaces</article-title>. <source>CVPR</source> <volume>2011</volume>, <fpage>3121</fpage>&#x2013;<lpage>3128</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/CVPR.2011.5995693</pub-id>
</mixed-citation>
</ref>
<ref id="B54">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Javaid</surname> <given-names>M.</given-names></name>
<name><surname>Haleem</surname> <given-names>A.</given-names></name>
<name><surname>Singh</surname> <given-names>R. P.</given-names></name>
<name><surname>Suman</surname> <given-names>R.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Industrial perspectives of 3D scanning: features, roles and it&#x2019;s analytical applications</article-title>. <source>Sensors Int.</source> <volume>2</volume>, <fpage>100114</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.sintl.2021.100114</pub-id>
</mixed-citation>
</ref>
<ref id="B55">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Jay</surname> <given-names>S.</given-names></name>
<name><surname>Rabatel</surname> <given-names>G.</given-names></name>
<name><surname>Gorretta</surname> <given-names>N.</given-names></name>
</person-group> (<year>2014</year>). &#x201c;
<article-title>In-field crop row stereo-reconstruction for plant phenotyping</article-title>,&#x201d; in <conf-name>Second International Conference on Robotics and Associated High-Technologies and Equipment for Agriculture and Forestry (RHEA-2014)</conf-name>. (<publisher-loc>Madrid, Spain</publisher-loc>: 
<publisher-name>RHEA Project (printed by PGM)</publisher-name>) <fpage>10</fpage>.
</mixed-citation>
</ref>
<ref id="B56">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Jeon</surname> <given-names>H.-G.</given-names></name>
<name><surname>Park</surname> <given-names>J.</given-names></name>
<name><surname>Choe</surname> <given-names>G.</given-names></name>
<name><surname>Park</surname> <given-names>J.</given-names></name>
<name><surname>Bok</surname> <given-names>Y.</given-names></name>
<name><surname>Tai</surname> <given-names>Y.-W.</given-names></name>
<etal/>
</person-group>. (<year>2015</year>). &#x201c;
<article-title>Accurate depth map estimation from a lenslet light field camera</article-title>,&#x201d; in <conf-name>Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>1547</fpage>&#x2013;<lpage>1555</lpage>.
</mixed-citation>
</ref>
<ref id="B57">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jin</surname> <given-names>S.</given-names></name>
<name><surname>Sun</surname> <given-names>X.</given-names></name>
<name><surname>Wu</surname> <given-names>F.</given-names></name>
<name><surname>Su</surname> <given-names>Y.</given-names></name>
<name><surname>Li</surname> <given-names>Y.</given-names></name>
<name><surname>Song</surname> <given-names>S.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). 
<article-title>Lidar sheds new light on plant phenomics for plant breeding and management: Recent advances and future prospects</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>171</volume>, <fpage>202</fpage>&#x2013;<lpage>223</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2020.11.006</pub-id>
</mixed-citation>
</ref>
<ref id="B58">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kahraman</surname> <given-names>S.</given-names></name>
<name><surname>Bacher</surname> <given-names>R.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>A comprehensive review of hyperspectral data fusion with lidar and sar data</article-title>. <source>Annu. Rev. Control</source> <volume>51</volume>, <fpage>236</fpage>&#x2013;<lpage>253</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.arcontrol.2021.03.003</pub-id>
</mixed-citation>
</ref>
<ref id="B59">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kantaros</surname> <given-names>A.</given-names></name>
<name><surname>Ganetsos</surname> <given-names>T.</given-names></name>
<name><surname>Petrescu</surname> <given-names>F. I. T.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Three-dimensional printing and 3D scanning: Emerging technologies exhibiting high potential in the field of cultural heritage</article-title>. <source>Appl. Sci.</source> <volume>13</volume>, <fpage>4777</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/app13084777</pub-id>
</mixed-citation>
</ref>
<ref id="B60">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Katz</surname> <given-names>B.</given-names></name>
<name><surname>Di Carlo</surname> <given-names>J.</given-names></name>
<name><surname>Kim</surname> <given-names>S.</given-names></name>
</person-group> (<year>2019</year>). &#x201c;
<article-title>Mini cheetah: A platform for pushing the limits of dynamic quadruped control</article-title>,&#x201d; in <conf-name>2019 International Conference on Robotics and Automation (ICRA)</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>6295</fpage>&#x2013;<lpage>6301</lpage>.
</mixed-citation>
</ref>
<ref id="B61">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kazmi</surname> <given-names>W.</given-names></name>
<name><surname>Foix</surname> <given-names>S.</given-names></name>
<name><surname>Aleny&#xe0;</surname> <given-names>G.</given-names></name>
<name><surname>Andersen</surname> <given-names>H. J.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>Indoor and outdoor depth imaging of leaves with time-of-flight and stereo vision sensors: Analysis and comparison</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>88</volume>, <fpage>128</fpage>&#x2013;<lpage>146</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2013.11.012</pub-id>
</mixed-citation>
</ref>
<ref id="B62">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Keller</surname> <given-names>M.</given-names></name>
<name><surname>Kolb</surname> <given-names>A.</given-names></name>
</person-group> (<year>2009</year>). 
<article-title>Real-time simulation of time-of-flight sensors</article-title>. <source>Simulation Model. Pract. Theory</source> <volume>17</volume>, <fpage>967</fpage>&#x2013;<lpage>978</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.simpat.2009.03.004</pub-id>
</mixed-citation>
</ref>
<ref id="B63">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kim</surname> <given-names>W.-S.</given-names></name>
<name><surname>Lee</surname> <given-names>D.-H.</given-names></name>
<name><surname>Kim</surname> <given-names>Y.-J.</given-names></name>
<name><surname>Kim</surname> <given-names>T.</given-names></name>
<name><surname>Lee</surname> <given-names>W.-S.</given-names></name>
<name><surname>Choi</surname> <given-names>C.-H.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Stereo-vision-based crop height estimation for agricultural robots</article-title>. <source>Comput. Electron. Agric.</source> <volume>181</volume>, <fpage>105937</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2020.105937</pub-id>
</mixed-citation>
</ref>
<ref id="B64">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kirchgessner</surname> <given-names>N.</given-names></name>
<name><surname>Liebisch</surname> <given-names>F.</given-names></name>
<name><surname>Yu</surname> <given-names>K.</given-names></name>
<name><surname>Pfeifer</surname> <given-names>J.</given-names></name>
<name><surname>Friedli</surname> <given-names>M.</given-names></name>
<name><surname>Hund</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2016</year>). 
<article-title>The ETH field phenotyping platform FIP: a cable-suspended multi-sensor system</article-title>. <source>Funct. Plant Biol.</source> <volume>44</volume>, <fpage>154</fpage>&#x2013;<lpage>168</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1071/FP16165</pub-id>, PMID: <pub-id pub-id-type="pmid">32480554</pub-id>
</mixed-citation>
</ref>
<ref id="B65">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kise</surname> <given-names>M.</given-names></name>
<name><surname>Zhang</surname> <given-names>Q.</given-names></name>
</person-group> (<year>2008</year>). 
<article-title>Development of a stereovision sensing system for 3D crop row structure mapping and tractor guidance</article-title>. <source>Biosyst. Eng.</source> <volume>101</volume>, <fpage>191</fpage>&#x2013;<lpage>198</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2008.08.001</pub-id>
</mixed-citation>
</ref>
<ref id="B66">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Klapa</surname> <given-names>P.</given-names></name>
<name><surname>Mitka</surname> <given-names>B.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>Edge effect and its impact upon the accuracy of 2D and 3D modelling using laser scanning</article-title>. <source>Geomatics Landmanagement Landscape</source>. <volume>1</volume>, <fpage>25</fpage>&#x2013;<lpage>33</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.15576/GLL/2017.1.25</pub-id>
</mixed-citation>
</ref>
<ref id="B67">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Klodt</surname> <given-names>M.</given-names></name>
<name><surname>Cremers</surname> <given-names>D.</given-names></name>
</person-group> (<year>2015</year>). &#x201c;
<article-title>High-resolution plant shape measurements from&#xa0;multi-view stereo reconstruction</article-title>,&#x201d; in <conf-name>Computer Vision-ECCV 2014 Workshops</conf-name>, <conf-loc>Zurich, Switzerland</conf-loc>, <conf-date>September 6&#x2013;7 and 12, 2014</conf-date> (<publisher-loc>Cham, Switzerland</publisher-loc>: 
<publisher-name>Springer</publisher-name>) Vol.&#xa0;<volume>13</volume>. <fpage>174</fpage>&#x2013;<lpage>184</lpage>, Proceedings, Part IV.
</mixed-citation>
</ref>
<ref id="B68">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Klodt</surname> <given-names>M.</given-names></name>
<name><surname>Herzog</surname> <given-names>K.</given-names></name>
<name><surname>T&#xf6;pfer</surname> <given-names>R.</given-names></name>
<name><surname>Cremers</surname> <given-names>D.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Field phenotyping of grapevine growth using dense stereo reconstruction</article-title>. <source>BMC Bioinf.</source> <volume>16</volume>, <fpage>1</fpage>&#x2013;<lpage>11</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s12859-015-0560-x</pub-id>, PMID: <pub-id pub-id-type="pmid">25943369</pub-id>
</mixed-citation>
</ref>
<ref id="B69">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Klose</surname> <given-names>R.</given-names></name>
<name><surname>Penlington</surname> <given-names>J.</given-names></name>
<name><surname>Ruckelshausen</surname> <given-names>A.</given-names></name>
</person-group> (<year>2009</year>). 
<article-title>Usability study of 3D time-of-flight cameras for automatic plant phenotyping</article-title>. <source>Bornimer Agrartechnische Berichte</source> <volume>69</volume>, <fpage>12</fpage>.
</mixed-citation>
</ref>
<ref id="B70">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Kumar</surname> <given-names>P.</given-names></name>
<name><surname>Connor</surname> <given-names>J.</given-names></name>
<name><surname>Miklavcic</surname> <given-names>S.</given-names></name>
</person-group> (<year>2014</year>). &#x201c;
<article-title>High-throughput 3D reconstruction of plant shoots for phenotyping</article-title>,&#x201d; in <conf-name>2014 13th International Conference on Control Automation Robotics &amp; Vision (ICARCV)</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>211</fpage>&#x2013;<lpage>216</lpage>.
</mixed-citation>
</ref>
<ref id="B71">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Langstroff</surname> <given-names>A.</given-names></name>
<name><surname>Heuermann</surname> <given-names>M. C.</given-names></name>
<name><surname>Stahl</surname> <given-names>A.</given-names></name>
<name><surname>Junker</surname> <given-names>A.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Opportunities and limits of controlled-environment plant phenotyping for climate response traits</article-title>. <source>Theor. Appl. Genet.</source> <volume>135</volume>, <fpage>1</fpage>&#x2013;<lpage>16</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00122-021-03892-1</pub-id>, PMID: <pub-id pub-id-type="pmid">34302493</pub-id>
</mixed-citation>
</ref>
<ref id="B72">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Laurentini</surname> <given-names>A.</given-names></name>
</person-group> (<year>1995</year>). 
<article-title>How far 3D shapes can be understood from 2D silhouettes</article-title>. <source>IEEE Trans. Pattern Anal. Mach. Intell.</source> <volume>17</volume>, <fpage>188</fpage>&#x2013;<lpage>195</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/34.368170</pub-id>
</mixed-citation>
</ref>
<ref id="B73">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lee</surname> <given-names>K.-C.</given-names></name>
<name><surname>Yang</surname> <given-names>J.-S.</given-names></name>
<name><surname>Yu</surname> <given-names>H. H.</given-names></name>
</person-group> (<year>2013</year>). 
<article-title>Development and evaluation of a petal thickness measuring device based on the dual laser triangulation method</article-title>. <source>Comput. Electron. Agric.</source> <volume>99</volume>, <fpage>85</fpage>&#x2013;<lpage>92</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2013.09.001</pub-id>
</mixed-citation>
</ref>
<ref id="B74">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lemmens</surname> <given-names>M.</given-names></name>
</person-group> (<year>2011</year>). 
<article-title>Terrestrial laser scanning</article-title>. <source>Geo-Information: Technologies Appl. Environ.</source> <volume>5</volume>, <fpage>101</fpage>&#x2013;<lpage>121</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-94-007-1667-4</pub-id>
</mixed-citation>
</ref>
<ref id="B75">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>L.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>Time-of-flight camera&#x2014;an introduction</article-title>. <source>Tech. White Paper</source>. <volume>14</volume>, <fpage>SLOA190B</fpage>.
</mixed-citation>
</ref>
<ref id="B76">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>F.</given-names></name>
<name><surname>Piasecki</surname> <given-names>C.</given-names></name>
<name><surname>Millwood</surname> <given-names>R. J.</given-names></name>
<name><surname>Wolfe</surname> <given-names>B.</given-names></name>
<name><surname>Mazarei</surname> <given-names>M.</given-names></name>
<name><surname>Stewart</surname> <given-names>C. N.</given-names> <suffix>Jr.</suffix></name>
</person-group> (<year>2020</year>). 
<article-title>High-throughput switchgrass phenotyping and biomass modeling by UAV</article-title>. <source>Front. Plant Sci.</source> <volume>11</volume>, <elocation-id>574073</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2020.574073</pub-id>, PMID: <pub-id pub-id-type="pmid">33193511</pub-id>
</mixed-citation>
</ref>
<ref id="B77">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>J.</given-names></name>
<name><surname>Tang</surname> <given-names>L.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>Developing a low-cost 3D plant morphological traits characterization system</article-title>. <source>Comput. Electron. Agric.</source> <volume>143</volume>, <fpage>1</fpage>&#x2013;<lpage>13</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2017.09.025</pub-id>
</mixed-citation>
</ref>
<ref id="B78">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>Y.</given-names></name>
<name><surname>Wen</surname> <given-names>W.</given-names></name>
<name><surname>Fan</surname> <given-names>J.</given-names></name>
<name><surname>Gou</surname> <given-names>W.</given-names></name>
<name><surname>Gu</surname> <given-names>S.</given-names></name>
<name><surname>Lu</surname> <given-names>X.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Multi-source data fusion&#xa0;improves time-series phenotype accuracy in maize under a field high-throughput phenotyping platform</article-title>. <source>Plant Phenomics</source> <volume>5</volume>, <fpage>0043</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.34133/plantphenomics.0043</pub-id>, PMID: <pub-id pub-id-type="pmid">37223316</pub-id>
</mixed-citation>
</ref>
<ref id="B79">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>D.</given-names></name>
<name><surname>Xu</surname> <given-names>L.</given-names></name>
<name><surname>Tang</surname> <given-names>X.</given-names></name>
<name><surname>Sun</surname> <given-names>S.</given-names></name>
<name><surname>Cai</surname> <given-names>X.</given-names></name>
<name><surname>Zhang</surname> <given-names>P.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>3D imaging of greenhouse plants with an inexpensive binocular stereo vision system</article-title>. <source>Remote Sens.</source> <volume>9</volume>, <fpage>508</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs9050508</pub-id>
</mixed-citation>
</ref>
<ref id="B80">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lin</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>LiDAR: An important tool for next-generation phenotyping technology of high potential for plant phenomics</article-title>? <source>Comput. Electron. Agric.</source> <volume>119</volume>, <fpage>61</fpage>&#x2013;<lpage>73</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2015.10.011</pub-id>
</mixed-citation>
</ref>
<ref id="B81">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lin</surname> <given-names>T.-Y.</given-names></name>
<name><surname>Juang</surname> <given-names>J.-G.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Application of 3D point cloud map and image identification to mobile robot navigation</article-title>. <source>Measurement Control</source> <volume>56</volume>, <fpage>911</fpage>&#x2013;<lpage>927</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1177/00202940221136242</pub-id>
</mixed-citation>
</ref>
<ref id="B82">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liu</surname> <given-names>X.</given-names></name>
<name><surname>Dong</surname> <given-names>X.</given-names></name>
<name><surname>Xue</surname> <given-names>Q.</given-names></name>
<name><surname>Leskovar</surname> <given-names>D. I.</given-names></name>
<name><surname>Jifon</surname> <given-names>J.</given-names></name>
<name><surname>Butnor</surname> <given-names>J. R.</given-names></name>
<etal/>
</person-group>. (<year>2018</year>). 
<article-title>Ground penetrating radar (GPR) detects fine roots of agricultural crops in the field</article-title>. <source>Plant Soil</source> <volume>423</volume>, <fpage>517</fpage>&#x2013;<lpage>531</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11104-017-3531-3</pub-id>
</mixed-citation>
</ref>
<ref id="B83">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Liu</surname> <given-names>Z.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
<name><surname>Hua</surname> <given-names>J.</given-names></name>
<name><surname>Wang</surname> <given-names>H.</given-names></name>
<name><surname>Huo</surname> <given-names>Q.</given-names></name>
<name><surname>Kang</surname> <given-names>M.</given-names></name>
</person-group> (<year>2024</year>). &#x201c;
<article-title>Plant digital twins based on model phenotyping and functional-structural plant modeling</article-title>,&#x201d; in <conf-name>2024 IEEE 4th International Conference on Digital Twins and Parallel Intelligence (DTPI)</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>211</fpage>&#x2013;<lpage>214</lpage>.
</mixed-citation>
</ref>
<ref id="B84">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lobet</surname> <given-names>G.</given-names></name>
<name><surname>Pag&#xe8;s</surname> <given-names>L.</given-names></name>
<name><surname>Draye</surname> <given-names>X.</given-names></name>
</person-group> (<year>2011</year>). 
<article-title>A novel image-analysis toolbox enabling quantitative analysis of root system architecture</article-title>. <source>Plant Physiol.</source> <volume>157</volume>, <fpage>29</fpage>&#x2013;<lpage>39</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1104/pp.111.179895</pub-id>, PMID: <pub-id pub-id-type="pmid">21771915</pub-id>
</mixed-citation>
</ref>
<ref id="B85">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lombardi</surname> <given-names>E.</given-names></name>
<name><surname>Ferrio</surname> <given-names>J. P.</given-names></name>
<name><surname>Rodr&#xed;guez-Robles</surname> <given-names>U.</given-names></name>
<name><surname>Resco de Dios</surname> <given-names>V.</given-names></name>
<name><surname>Voltas</surname> <given-names>J.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Ground-penetrating radar as phenotyping tool for characterizing intraspecific variability in root traits of a widespread conifer</article-title>. <source>Plant Soil</source> <volume>468</volume>, <fpage>319</fpage>&#x2013;<lpage>336</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11104-021-05135-0</pub-id>
</mixed-citation>
</ref>
<ref id="B86">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lopes</surname> <given-names>M. S.</given-names></name>
<name><surname>Moreira</surname> <given-names>A. P.</given-names></name>
<name><surname>Silva</surname> <given-names>M. F.</given-names></name>
<name><surname>Santos</surname> <given-names>F.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Robotic arm development for a quadruped robot</article-title>. <source>Climbing Walking Robots Conf.</source> <volume>811</volume>, <fpage>63</fpage>&#x2013;<lpage>74</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-3-031-47272-5_6</pub-id>
</mixed-citation>
</ref>
<ref id="B87">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Lou</surname> <given-names>L.</given-names></name>
<name><surname>Liu</surname> <given-names>Y.</given-names></name>
<name><surname>Han</surname> <given-names>J.</given-names></name>
<name><surname>Doonan</surname> <given-names>J. H.</given-names></name>
</person-group> (<year>2014</year>a). &#x201c;
<article-title>Accurate multi-view stereo 3D reconstruction for cost-effective plant phenotyping</article-title>,&#x201d; in <conf-name>Image Analysis and Recognition: 11th International Conference, ICIAR 2014</conf-name>, <conf-loc>Vilamoura, Portugal</conf-loc>, <conf-date>October 22-24, 2014</conf-date>, (<publisher-loc>Cham, Switzerland</publisher-loc>: 
<publisher-name>Springer</publisher-name>) Vol. <volume>11</volume>. <fpage>349</fpage>&#x2013;<lpage>356</lpage>, Proceedings, Part II.
</mixed-citation>
</ref>
<ref id="B88">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Lou</surname> <given-names>L.</given-names></name>
<name><surname>Liu</surname> <given-names>Y.</given-names></name>
<name><surname>Shen</surname> <given-names>M.</given-names></name>
<name><surname>Han</surname> <given-names>J.</given-names></name>
<name><surname>Corke</surname> <given-names>F.</given-names></name>
<name><surname>Doonan</surname> <given-names>J. H.</given-names></name>
</person-group> (<year>2015</year>). &#x201c;
<article-title>Estimation of branch angle from 3D point cloud of plants</article-title>,&#x201d; in <conf-name>2015 International Conference on 3D Vision</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>554</fpage>&#x2013;<lpage>561</lpage>.
</mixed-citation>
</ref>
<ref id="B89">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Lou</surname> <given-names>L.</given-names></name>
<name><surname>Liu</surname> <given-names>Y.</given-names></name>
<name><surname>Sheng</surname> <given-names>M.</given-names></name>
<name><surname>Han</surname> <given-names>J.</given-names></name>
<name><surname>Doonan</surname> <given-names>J. H.</given-names></name>
</person-group> (<year>2014</year>b). &#x201c;
<article-title>A cost-effective automatic 3D reconstruction pipeline for plants using multi-view images</article-title>,&#x201d; in <conf-name>Advances in Autonomous Robotics Systems: 15th Annual Conference, TAROS 2014</conf-name>, <conf-loc>Birmingham, UK</conf-loc>, <conf-date>September 1-3, 2014</conf-date>. (<publisher-loc>Cham, Switzerland</publisher-loc>: 
<publisher-name>Springer</publisher-name>) <fpage>221</fpage>&#x2013;<lpage>230</lpage>.
</mixed-citation>
</ref>
<ref id="B90">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lu</surname> <given-names>T.</given-names></name>
<name><surname>Ding</surname> <given-names>K.</given-names></name>
<name><surname>Fu</surname> <given-names>W.</given-names></name>
<name><surname>Li</surname> <given-names>S.</given-names></name>
<name><surname>Guo</surname> <given-names>A.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Coupled adversarial learning for fusion classification of hyperspectral and LiDAR data</article-title>. <source>Inf. Fusion</source> <volume>93</volume>, <fpage>118</fpage>&#x2013;<lpage>131</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.inffus.2022.12.020</pub-id>
</mixed-citation>
</ref>
<ref id="B91">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lynch</surname> <given-names>J. P.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Harnessing root architecture to address global challenges</article-title>. <source>Plant J.</source> <volume>109</volume>, <fpage>415</fpage>&#x2013;<lpage>431</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/tpj.15560</pub-id>, PMID: <pub-id pub-id-type="pmid">34724260</pub-id>
</mixed-citation>
</ref>
<ref id="B92">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ma</surname> <given-names>X.</given-names></name>
<name><surname>Wei</surname> <given-names>B.</given-names></name>
<name><surname>Guan</surname> <given-names>H.</given-names></name>
<name><surname>Yu</surname> <given-names>S.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>A method of calculating phenotypic traits for soybean canopies based on three-dimensional point cloud</article-title>. <source>Ecol. Inf.</source> <volume>68</volume>, <fpage>101524</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ecoinf.2021.101524</pub-id>
</mixed-citation>
</ref>
<ref id="B93">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Madec</surname> <given-names>S.</given-names></name>
<name><surname>Baret</surname> <given-names>F.</given-names></name>
<name><surname>de Solan</surname> <given-names>B.</given-names></name>
<name><surname>Thomas</surname> <given-names>S.</given-names></name>
<name><surname>Dutartre</surname> <given-names>D.</given-names></name>
<name><surname>Jezequel</surname> <given-names>S.</given-names></name>
<etal/>
</person-group>. (<year>2017</year>).&#xa0;
<article-title>High-throughput phenotyping of plant height: comparing unmanned aerial vehicles and ground LiDAR estimates</article-title>. <source>Front. Plant Sci.</source> <volume>8</volume>, <elocation-id>2002</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2017.02002</pub-id>, PMID: <pub-id pub-id-type="pmid">29230229</pub-id>
</mixed-citation>
</ref>
<ref id="B94">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Malambo</surname> <given-names>L.</given-names></name>
<name><surname>Popescu</surname> <given-names>S. C.</given-names></name>
<name><surname>Horne</surname> <given-names>D. W.</given-names></name>
<name><surname>Pugh</surname> <given-names>N. A.</given-names></name>
<name><surname>Rooney</surname> <given-names>W. L.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Automated detection and measurement of individual sorghum panicles using density-based clustering of terrestrial lidar data</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>149</volume>, <fpage>1</fpage>&#x2013;<lpage>13</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2018.12.015</pub-id>
</mixed-citation>
</ref>
<ref id="B95">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Malhotra</surname> <given-names>A.</given-names></name>
<name><surname>Gupta</surname> <given-names>K.</given-names></name>
<name><surname>Kant</surname> <given-names>K.</given-names></name>
</person-group> (<year>2011</year>). 
<article-title>Laser triangulation for 3D profiling of target</article-title>. <source>Int. J. Comput. Appl.</source> <volume>35</volume>, <fpage>47</fpage>&#x2013;<lpage>50</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5120/4398-6112</pub-id>
</mixed-citation>
</ref>
<ref id="B96">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Mitsanis</surname> <given-names>C.</given-names></name>
<name><surname>Hurst</surname> <given-names>W.</given-names></name>
<name><surname>Tekinerdogan</surname> <given-names>B.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>A 3D functional plant modelling framework for agricultural digital twins</article-title>. <source>Comput. Electron. Agric.</source> <volume>218</volume>, <fpage>108733</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.108733</pub-id>
</mixed-citation>
</ref>
<ref id="B97">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Moller</surname> <given-names>K.</given-names></name>
<name><surname>Klose</surname> <given-names>R.</given-names></name>
<name><surname>Wunder</surname> <given-names>E.</given-names></name>
<name><surname>Busemeyer</surname> <given-names>L.</given-names></name>
<name><surname>Ruckelshausen</surname> <given-names>A.</given-names></name>
</person-group> (<year>2009</year>). 
<article-title>Sensor based system to determine the height of triticale in field trials</article-title>. <source>Proc. Soc. Eng. Agric. Conf.&#x2013;Diverse Challenges Innovative Solutions</source>.
</mixed-citation>
</ref>
<ref id="B98">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Nam</surname> <given-names>K.-H.</given-names></name>
<name><surname>Ko</surname> <given-names>E. M.</given-names></name>
<name><surname>Mun</surname> <given-names>S.</given-names></name>
<name><surname>Kim</surname> <given-names>C.-G.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>Measuring leaf areas with a structured-light 3D scanner</article-title>. <source>Korean J. Ecol. Environ.</source> <volume>47</volume>, <fpage>232</fpage>&#x2013;<lpage>238</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11614/KSL.2014.47.3.232</pub-id>
</mixed-citation>
</ref>
<ref id="B99">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ng</surname> <given-names>A.</given-names></name>
<name><surname>Jordan</surname> <given-names>M.</given-names></name>
<name><surname>Weiss</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2001</year>). 
<article-title>On spectral clustering: Analysis and an algorithm</article-title>. <source>Adv. Neural Inf. Process. Syst.</source> <volume>14</volume>.
</mixed-citation>
</ref>
<ref id="B100">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Nguyen</surname> <given-names>T. T.</given-names></name>
<name><surname>Slaughter</surname> <given-names>D. C.</given-names></name>
<name><surname>Maloof</surname> <given-names>J. N.</given-names></name>
<name><surname>Sinha</surname> <given-names>N.</given-names></name>
</person-group> (<year>2016</year>a). 
<article-title>Plant phenotyping using multi-view stereo vision with structured lights</article-title>. <source>Autonomous Air Ground Sens. Syst. Agric. Optimization Phenotyping</source> <volume>9866</volume>, <fpage>22</fpage>&#x2013;<lpage>30</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.13031/aim.20162444593</pub-id>
</mixed-citation>
</ref>
<ref id="B101">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Nguyen</surname> <given-names>T. T.</given-names></name>
<name><surname>Slaughter</surname> <given-names>D. C.</given-names></name>
<name><surname>Max</surname> <given-names>N.</given-names></name>
<name><surname>Maloof</surname> <given-names>J. N.</given-names></name>
<name><surname>Sinha</surname> <given-names>N.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Structured light-based 3D reconstruction system for plants</article-title>. <source>Sensors</source> <volume>15</volume>, <fpage>18587</fpage>&#x2013;<lpage>18612</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s150818587</pub-id>, PMID: <pub-id pub-id-type="pmid">26230701</pub-id>
</mixed-citation>
</ref>
<ref id="B102">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Nguyen</surname> <given-names>T. T.</given-names></name>
<name><surname>Slaughter</surname> <given-names>D. C.</given-names></name>
<name><surname>Townsley</surname> <given-names>B.</given-names></name>
<name><surname>Carriedo</surname> <given-names>L.</given-names></name>
<name><surname>Julin</surname> <given-names>N. N.</given-names></name>
<name><surname>Sinha</surname> <given-names>N.</given-names></name>
</person-group> (<year>2016</year>b). &#x201c;
<article-title>Comparison of structure-from-motion and stereo vision techniques for full in-field 3d reconstruction and phenotyping of plants: An investigation in sunflower</article-title>,&#x201d; in <conf-name>2016 ASABE Annual International Meeting</conf-name>, (<publisher-loc>St. Joseph, Michigan</publisher-loc>: 
<publisher-name>American Society of Agricultural and Biological Engineers (ASABE)</publisher-name>) Vol. <volume>1</volume>.
</mixed-citation>
</ref>
<ref id="B103">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ninomiya</surname> <given-names>S.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>High-throughput field crop phenotyping: current status and challenges</article-title>. <source>Breed. Sci.</source> <volume>72</volume>, <fpage>3</fpage>&#x2013;<lpage>18</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1270/jsbbs.21069</pub-id>, PMID: <pub-id pub-id-type="pmid">36045897</pub-id>
</mixed-citation>
</ref>
<ref id="B104">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Olesen</surname> <given-names>O. V.</given-names></name>
<name><surname>Paulsen</surname> <given-names>R. R.</given-names></name>
<name><surname>Hojgaard</surname> <given-names>L.</given-names></name>
<name><surname>Roed</surname> <given-names>B.</given-names></name>
<name><surname>Larsen</surname> <given-names>R.</given-names></name>
</person-group> (<year>2011</year>). 
<article-title>Motion tracking for medical imaging: a nonvisible structured light tracking approach</article-title>. <source>IEEE Trans. Med. Imaging</source> <volume>31</volume>, <fpage>79</fpage>&#x2013;<lpage>87</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TMI.2011.2165157</pub-id>, PMID: <pub-id pub-id-type="pmid">21859614</pub-id>
</mixed-citation>
</ref>
<ref id="B105">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Padmanabhan</surname> <given-names>P.</given-names></name>
<name><surname>Zhang</surname> <given-names>C.</given-names></name>
<name><surname>Charbon</surname> <given-names>E.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Modeling and analysis of a direct time-of-flight sensor architecture for LiDAR applications</article-title>. <source>Sensors</source> <volume>19</volume>, <fpage>5464</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s19245464</pub-id>, PMID: <pub-id pub-id-type="pmid">31835807</pub-id>
</mixed-citation>
</ref>
<ref id="B106">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Pan</surname> <given-names>L.</given-names></name>
<name><surname>Liu</surname> <given-names>L.</given-names></name>
<name><surname>Condon</surname> <given-names>A. G.</given-names></name>
<name><surname>Estavillo</surname> <given-names>G. M.</given-names></name>
<name><surname>Coe</surname> <given-names>R. A.</given-names></name>
<name><surname>Bull</surname> <given-names>G.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). &#x201c;
<article-title>Biomass prediction with 3D point clouds from LiDAR</article-title>,&#x201d; in <conf-name>Proceedings of the IEEE/CVF Winter Conference on Applications of Computer Vision</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>1330</fpage>&#x2013;<lpage>1340</lpage>.
</mixed-citation>
</ref>
<ref id="B107">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Panjvani</surname> <given-names>K.</given-names></name>
<name><surname>Dinh</surname> <given-names>A. V.</given-names></name>
<name><surname>Wahid</surname> <given-names>K. A.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>LiDARPheno&#x2013;A low-cost LiDAR-based 3D scanning system for leaf morphological trait extraction</article-title>. <source>Front. Plant Sci.</source> <volume>10</volume>, <elocation-id>147</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2019.00147</pub-id>, PMID: <pub-id pub-id-type="pmid">30815008</pub-id>
</mixed-citation>
</ref>
<ref id="B108">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Patel</surname> <given-names>A. K.</given-names></name>
<name><surname>Park</surname> <given-names>E.-S.</given-names></name>
<name><surname>Lee</surname> <given-names>H.</given-names></name>
<name><surname>Priya</surname> <given-names>G. G. L.</given-names></name>
<name><surname>Kim</surname> <given-names>H.</given-names></name>
<name><surname>Joshi</surname> <given-names>R.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Deep learning-based plant organ segmentation and phenotyping of sorghum plants using LiDAR point cloud</article-title>. <source>IEEE J. Selected Topics Appl. Earth Observations Remote Sensing</source>. <volume>16</volume>, <fpage>8492</fpage>&#x2013;<lpage>8507</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/JSTARS.2023.3312815</pub-id>
</mixed-citation>
</ref>
<ref id="B109">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Paturkar</surname> <given-names>A.</given-names></name>
<name><surname>Sen Gupta</surname> <given-names>G.</given-names></name>
<name><surname>Bailey</surname> <given-names>D.</given-names></name>
</person-group> (<year>2019</year>). &#x201c;
<article-title>3D reconstruction of plants under outdoor conditions using image-based computer vision</article-title>,&#x201d; in <conf-name>Recent Trends in Image Processing and Pattern Recognition: Second International Conference, RTIP2R 2018</conf-name>, <conf-loc>Solapur, India</conf-loc>, <conf-date>December 21&#x2013;22, 2018</conf-date>, (<publisher-loc>Singapore</publisher-loc>: 
<publisher-name>Springer</publisher-name>) Vol. <volume>2</volume>. <fpage>284</fpage>&#x2013;<lpage>297</lpage>, Revised Selected Papers, Part III.
</mixed-citation>
</ref>
<ref id="B110">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Paulus</surname> <given-names>S.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Measuring crops in 3D: using geometry for plant phenotyping</article-title>. <source>Plant Methods</source> <volume>15</volume>, <fpage>103</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13007-019-0490-0</pub-id>, PMID: <pub-id pub-id-type="pmid">31497064</pub-id>
</mixed-citation>
</ref>
<ref id="B111">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Paulus</surname> <given-names>S.</given-names></name>
<name><surname>Dupuis</surname> <given-names>J.</given-names></name>
<name><surname>Mahlein</surname> <given-names>A.-K.</given-names></name>
<name><surname>Kuhlmann</surname> <given-names>H.</given-names></name>
</person-group> (<year>2013</year>). 
<article-title>Surface feature based classification of plant organs from 3D laserscanned point clouds for plant phenotyping</article-title>. <source>BMC Bioinf.</source> <volume>14</volume>, <fpage>1</fpage>&#x2013;<lpage>12</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/1471-2105-14-238</pub-id>, PMID: <pub-id pub-id-type="pmid">23890277</pub-id>
</mixed-citation>
</ref>
<ref id="B112">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Paulus</surname> <given-names>S.</given-names></name>
<name><surname>Eichert</surname> <given-names>T.</given-names></name>
<name><surname>Goldbach</surname> <given-names>H. E.</given-names></name>
<name><surname>Kuhlmann</surname> <given-names>H.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>Limits of active laser triangulation as an instrument for high precision plant imaging</article-title>. <source>Sensors</source> <volume>14</volume>, <fpage>2489</fpage>&#x2013;<lpage>2509</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s140202489</pub-id>, PMID: <pub-id pub-id-type="pmid">24504106</pub-id>
</mixed-citation>
</ref>
<ref id="B113">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Paulus</surname> <given-names>S.</given-names></name>
<name><surname>L&#xe9;on</surname> <given-names>J.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>High-precision laser scanning system for capturing 3D plant architecture and analysing growth of cereal plants</article-title>. <source>Biosyst. Eng.</source> <volume>121</volume>, <fpage>1</fpage>&#x2013;<lpage>11</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2014.01.010</pub-id>
</mixed-citation>
</ref>
<ref id="B114">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Peruzzo</surname> <given-names>L.</given-names></name>
<name><surname>Chou</surname> <given-names>C.</given-names></name>
<name><surname>Wu</surname> <given-names>Y.</given-names></name>
<name><surname>Schmutz</surname> <given-names>M.</given-names></name>
<name><surname>Mary</surname> <given-names>B.</given-names></name>
<name><surname>Wagner</surname> <given-names>F. M.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>Imaging of plant current pathways for non-invasive root Phenotyping using a newly developed electrical current source density approach</article-title>. <source>Plant Soil</source> <volume>450</volume>, <fpage>567</fpage>&#x2013;<lpage>584</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11104-020-04529-w</pub-id>
</mixed-citation>
</ref>
<ref id="B115">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Polder</surname> <given-names>G.</given-names></name>
<name><surname>Hofstee</surname> <given-names>J. W.</given-names></name>
</person-group> (<year>2014</year>). &#x201c;
<article-title>Phenotyping large tomato plants in the greenhouse using a 3D light-field camera</article-title>,&#x201d; in <conf-name>2014 Montreal</conf-name>, <conf-loc>Quebec Canada</conf-loc>, <conf-date>July 13&#x2013;July 16, 2014</conf-date>. (<publisher-loc>St. Joseph, Michigan</publisher-loc>: 
<publisher-name>American Society of Agricultural and Biological Engineers (ASABE)</publisher-name>) <fpage>1</fpage>.
</mixed-citation>
</ref>
<ref id="B116">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Poorter</surname> <given-names>H.</given-names></name>
<name><surname>Fiorani</surname> <given-names>F.</given-names></name>
<name><surname>Pieruschka</surname> <given-names>R.</given-names></name>
<name><surname>Wojciechowski</surname> <given-names>T.</given-names></name>
<name><surname>van der Putten</surname> <given-names>W. H.</given-names></name>
<name><surname>Kleyer</surname> <given-names>M.</given-names></name>
<etal/>
</person-group>. (<year>2016</year>). 
<article-title>Pampered inside, pestered outside? Differences and similarities between plants growing in controlled conditions and in the field</article-title>. <source>New Phytol.</source> <volume>212</volume>, <fpage>838</fpage>&#x2013;<lpage>855</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/nph.14243</pub-id>, PMID: <pub-id pub-id-type="pmid">27783423</pub-id>
</mixed-citation>
</ref>
<ref id="B117">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Preuksakarn</surname> <given-names>C.</given-names></name>
<name><surname>Boudon</surname> <given-names>F.</given-names></name>
<name><surname>Ferraro</surname> <given-names>P.</given-names></name>
<name><surname>Durand</surname> <given-names>J.-B.</given-names></name>
<name><surname>Nikinmaa</surname> <given-names>E.</given-names></name>
<name><surname>Godin</surname> <given-names>C.</given-names></name>
</person-group> (<year>2010</year>). &#x201c;
<article-title>Reconstructing plant architecture from 3D laser scanner data</article-title>,&#x201d; in <conf-name>6th International Workshop on Functional-Structural Plant Models</conf-name>. (<publisher-loc>Davis, California</publisher-loc>: 
<publisher-name>University of California, Davis</publisher-name>) <fpage>12</fpage>&#x2013;<lpage>17</lpage>.
</mixed-citation>
</ref>
<ref id="B118">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Richardson</surname> <given-names>G. A.</given-names></name>
<name><surname>Lohani</surname> <given-names>H. K.</given-names></name>
<name><surname>Potnuru</surname> <given-names>C.</given-names></name>
<name><surname>Donepudi</surname> <given-names>L. P.</given-names></name>
<name><surname>Pankajakshan</surname> <given-names>P.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>PhenoBot: an automated system for leaf area analysis using deep learning</article-title>. <source>Planta</source> <volume>257</volume>, <fpage>36</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00425-023-04068-5</pub-id>, PMID: <pub-id pub-id-type="pmid">36627492</pub-id>
</mixed-citation>
</ref>
<ref id="B119">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rose</surname> <given-names>J. C.</given-names></name>
<name><surname>Paulus</surname> <given-names>S.</given-names></name>
<name><surname>Kuhlmann</surname> <given-names>H.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Accuracy analysis of a multi-view stereo approach for phenotyping of tomato plants at the organ level</article-title>. <source>Sensors</source> <volume>15</volume>, <fpage>9651</fpage>&#x2013;<lpage>9665</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s150509651</pub-id>, PMID: <pub-id pub-id-type="pmid">25919368</pub-id>
</mixed-citation>
</ref>
<ref id="B120">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rosell-Polo</surname> <given-names>J. R.</given-names></name>
<name><surname>Cheein</surname> <given-names>F. A.</given-names></name>
<name><surname>Gregorio</surname> <given-names>E.</given-names></name>
<name><surname>And&#xfa;jar</surname> <given-names>D.</given-names></name>
<name><surname>Puigdom&#xe8;nech</surname> <given-names>L.</given-names></name>
<name><surname>Masip</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2015</year>). 
<article-title>Advances in structured light sensors applications in precision agriculture and livestock farming</article-title>. <source>Adv. Agron.</source> <volume>133</volume>, <fpage>71</fpage>&#x2013;<lpage>112</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/bs.agron.2015.05.002</pub-id>
</mixed-citation>
</ref>
<ref id="B121">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rossi</surname> <given-names>R.</given-names></name>
<name><surname>Leolini</surname> <given-names>C.</given-names></name>
<name><surname>Costafreda-Aumedes</surname> <given-names>S.</given-names></name>
<name><surname>Leolini</surname> <given-names>L.</given-names></name>
<name><surname>Bindi</surname> <given-names>M.</given-names></name>
<name><surname>Zaldei</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>Performances evaluation of a low-cost platform for high-resolution plant phenotyping</article-title>. <source>Sensors</source> <volume>20</volume>, <fpage>3150</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s20113150</pub-id>, PMID: <pub-id pub-id-type="pmid">32498361</pub-id>
</mixed-citation>
</ref>
<ref id="B122">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Ruckelshausen</surname> <given-names>A.</given-names></name>
<name><surname>Biber</surname> <given-names>P.</given-names></name>
<name><surname>Dorna</surname> <given-names>M.</given-names></name>
<name><surname>Gremmes</surname> <given-names>H.</given-names></name>
<name><surname>Klose</surname> <given-names>R.</given-names></name>
<name><surname>Linz</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2009</year>). &#x201c;
<article-title>BoniRob: an autonomous field robot platform for individual plant phenotyping</article-title>,&#x201d; in <source>Precision agriculture&#x2019;09</source> (<publisher-loc>Wageningen, The Netherlands</publisher-loc>: 
<publisher-name>Wageningen Academic</publisher-name>), <fpage>841</fpage>&#x2013;<lpage>847</lpage>.
</mixed-citation>
</ref>
<ref id="B123">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Salvi</surname> <given-names>J.</given-names></name>
<name><surname>Fernandez</surname> <given-names>S.</given-names></name>
<name><surname>Pribanic</surname> <given-names>T.</given-names></name>
<name><surname>Llado</surname> <given-names>X.</given-names></name>
</person-group> (<year>2010</year>). 
<article-title>A state of the art in structured light patterns for surface profilometry</article-title>. <source>Pattern Recognition</source> <volume>43</volume>, <fpage>2666</fpage>&#x2013;<lpage>2680</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.patcog.2010.03.004</pub-id>
</mixed-citation>
</ref>
<ref id="B124">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Sampaio</surname> <given-names>G. S.</given-names></name>
<name><surname>Silva</surname> <given-names>L. A.</given-names></name>
<name><surname>Marengoni</surname> <given-names>M.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>3D reconstruction of non-rigid plants and sensor data fusion for agriculture phenotyping</article-title>. <source>Sensors</source> <volume>21</volume>, <fpage>4115</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s21124115</pub-id>, PMID: <pub-id pub-id-type="pmid">34203831</pub-id>
</mixed-citation>
</ref>
<ref id="B125">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Santos</surname> <given-names>T. T.</given-names></name>
<name><surname>Koenigkan</surname> <given-names>L. V.</given-names></name>
<name><surname>Barbedo</surname> <given-names>J. G. A.</given-names></name>
<name><surname>Rodrigues</surname> <given-names>G. C.</given-names></name>
</person-group> (<year>2015</year>). &#x201c;
<article-title>3D plant modeling: localization, mapping and segmentation for plant phenotyping using a single hand-held camera</article-title>,&#x201d; in <conf-name>Computer Vision-ECCV 2014 Workshops</conf-name>, <conf-loc>Zurich, Switzerland</conf-loc>, <conf-date>September 6&#x2013;7 and 12, 2014</conf-date>, (<publisher-loc>Cham, Switzerland</publisher-loc>: 
<publisher-name>Springer</publisher-name>) Vol. <volume>13</volume>. <fpage>247</fpage>&#x2013;<lpage>263</lpage>, Proceedings, Part IV.
</mixed-citation>
</ref>
<ref id="B126">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Scalcon</surname> <given-names>F. P.</given-names></name>
<name><surname>Tahal</surname> <given-names>R.</given-names></name>
<name><surname>Ahrabi</surname> <given-names>M.</given-names></name>
<name><surname>Huangfu</surname> <given-names>Y.</given-names></name>
<name><surname>Ahmed</surname> <given-names>R.</given-names></name>
<name><surname>Nahid-Mobarakeh</surname> <given-names>B.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). &#x201c;
<article-title>AI-powered video monitoring: assessing the NVIDIA jetson orin devices for edge computing applications</article-title>,&#x201d; in <conf-name>2024 IEEE Transportation Electrification Conference and Expo (ITEC)</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>1</fpage>&#x2013;<lpage>6</lpage>.
</mixed-citation>
</ref>
<ref id="B127">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Schima</surname> <given-names>R.</given-names></name>
<name><surname>Mollenhauer</surname> <given-names>H.</given-names></name>
<name><surname>Grenzd&#xf6;rffer</surname> <given-names>G.</given-names></name>
<name><surname>Merbach</surname> <given-names>I.</given-names></name>
<name><surname>Lausch</surname> <given-names>A.</given-names></name>
<name><surname>Dietrich</surname> <given-names>P.</given-names></name>
<etal/>
</person-group>. (<year>2016</year>). 
<article-title>Imagine all the plants: Evaluation of a light-field camera for on-site crop growth monitoring</article-title>. <source>Remote Sens.</source> <volume>8</volume>, <fpage>823</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs8100823</pub-id>
</mixed-citation>
</ref>
<ref id="B128">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Schlarp</surname> <given-names>J.</given-names></name>
<name><surname>Csencsics</surname> <given-names>E.</given-names></name>
<name><surname>Schitter</surname> <given-names>G.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Optical scanning of a laser triangulation sensor for 3-D imaging</article-title>. <source>IEEE Trans. Instrumentation Measurement</source> <volume>69</volume>, <fpage>3606</fpage>&#x2013;<lpage>3613</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TIM.2019.2933343</pub-id>
</mixed-citation>
</ref>
<ref id="B129">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shafiekhani</surname> <given-names>A.</given-names></name>
<name><surname>Kadam</surname> <given-names>S.</given-names></name>
<name><surname>Fritschi</surname> <given-names>F. B.</given-names></name>
<name><surname>DeSouza</surname> <given-names>G. N.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>Vinobot and vinoculer: Two robotic platforms for high-throughput field phenotyping</article-title>. <source>Sensors</source> <volume>17</volume>, <fpage>214</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s17010214</pub-id>, PMID: <pub-id pub-id-type="pmid">28124976</pub-id>
</mixed-citation>
</ref>
<ref id="B130">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shi</surname> <given-names>J.</given-names></name>
<name><surname>Malik</surname> <given-names>J.</given-names></name>
</person-group> (<year>2000</year>). 
<article-title>Normalized cuts and image segmentation</article-title>. <source>IEEE Trans. Pattern Anal. Mach. Intell.</source> <volume>22</volume>, <fpage>888</fpage>&#x2013;<lpage>905</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/34.868688</pub-id>
</mixed-citation>
</ref>
<ref id="B131">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shi</surname> <given-names>Y.</given-names></name>
<name><surname>Thomasson</surname> <given-names>J. A.</given-names></name>
<name><surname>Murray</surname> <given-names>S. C.</given-names></name>
<name><surname>Pugh</surname> <given-names>N. A.</given-names></name>
<name><surname>Rooney</surname> <given-names>W. L.</given-names></name>
<name><surname>Shafian</surname> <given-names>S.</given-names></name>
<etal/>
</person-group>. (<year>2016</year>). 
<article-title>Unmanned aerial vehicles for high-throughput phenotyping and agronomic research</article-title>. <source>PloS One</source> <volume>11</volume>, <elocation-id>e0159781</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0159781</pub-id>, PMID: <pub-id pub-id-type="pmid">27472222</pub-id>
</mixed-citation>
</ref>
<ref id="B132">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Singh</surname> <given-names>G.</given-names></name>
<name><surname>Nandakumar</surname> <given-names>M. P.</given-names></name>
<name><surname>Ashok</surname> <given-names>S.</given-names></name>
</person-group> (<year>2016</year>). &#x201c;
<article-title>Adaptive fuzzy-PID and neural network based object tracking using a 3-axis platform</article-title>,&#x201d; in <conf-name>2016 IEEE International Conference on Engineering and Technology (ICETECH)</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>1012</fpage>&#x2013;<lpage>1017</lpage>.
</mixed-citation>
</ref>
<ref id="B133">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Sodhi</surname> <given-names>P.</given-names></name>
<name><surname>Vijayarangan</surname> <given-names>S.</given-names></name>
<name><surname>Wettergreen</surname> <given-names>D.</given-names></name>
</person-group> (<year>2017</year>). &#x201c;
<article-title>In-field segmentation and identification of plant structures using 3D imaging</article-title>,&#x201d; in <conf-name>2017 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</conf-name>. (<publisher-loc>Piscataway, NJ</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>5180</fpage>&#x2013;<lpage>5187</lpage>.
</mixed-citation>
</ref>
<ref id="B134">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Song</surname> <given-names>Y.</given-names></name>
<name><surname>Glasbey</surname> <given-names>C. A.</given-names></name>
<name><surname>van der Heijden</surname> <given-names>G. W. A. M.</given-names></name>
<name><surname>Polder</surname> <given-names>G.</given-names></name>
<name><surname>Dieleman</surname> <given-names>J. A.</given-names></name>
</person-group> (<year>2011</year>). &#x201c;
<article-title>Combining stereo and time-of-flight images with application to automatic plant phenotyping</article-title>,&#x201d; in <conf-name>Image Analysis: 17th Scandinavian Conference, SCIA 2011</conf-name>, <conf-loc>Ystad, Sweden</conf-loc>, <conf-date>May 2011</conf-date>, (<publisher-loc>Berlin, Heidelberg</publisher-loc>: 
<publisher-name>Springer</publisher-name>) Vol. <volume>17</volume>. <fpage>467</fpage>&#x2013;<lpage>478</lpage>, Proceedings.
</mixed-citation>
</ref>
<ref id="B135">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Song</surname> <given-names>P.</given-names></name>
<name><surname>Li</surname> <given-names>Z.</given-names></name>
<name><surname>Yang</surname> <given-names>M.</given-names></name>
<name><surname>Shao</surname> <given-names>Y.</given-names></name>
<name><surname>Pu</surname> <given-names>Z.</given-names></name>
<name><surname>Yang</surname> <given-names>W.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Dynamic detection of three-dimensional crop phenotypes based on a consumer-grade RGB-D camera</article-title>. <source>Front. Plant Sci.</source> <volume>14</volume>, <elocation-id>1097725</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2023.1097725</pub-id>, PMID: <pub-id pub-id-type="pmid">36778701</pub-id>
</mixed-citation>
</ref>
<ref id="B136">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Su</surname> <given-names>Y.</given-names></name>
<name><surname>Wu</surname> <given-names>F.</given-names></name>
<name><surname>Ao</surname> <given-names>Z.</given-names></name>
<name><surname>Jin</surname> <given-names>S.</given-names></name>
<name><surname>Qin</surname> <given-names>F.</given-names></name>
<name><surname>Liu</surname> <given-names>B.</given-names></name>
<etal/>
</person-group>. (<year>2019</year>). 
<article-title>Evaluating maize&#xa0;phenotype dynamics under drought stress using terrestrial lidar</article-title>. <source>Plant Methods</source> <volume>15</volume>, <fpage>1</fpage>&#x2013;<lpage>16</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13007-019-0396-x</pub-id>, PMID: <pub-id pub-id-type="pmid">30740137</pub-id>
</mixed-citation>
</ref>
<ref id="B137">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Sun</surname> <given-names>S.</given-names></name>
<name><surname>Li</surname> <given-names>C.</given-names></name>
<name><surname>Paterson</surname> <given-names>A. H.</given-names></name>
<name><surname>Jiang</surname> <given-names>Y.</given-names></name>
<name><surname>Xu</surname> <given-names>R.</given-names></name>
<name><surname>Robertson</surname> <given-names>J. S.</given-names></name>
<etal/>
</person-group>. (<year>2018</year>). 
<article-title>In-field high throughput phenotyping and cotton plant growth analysis using LiDAR</article-title>. <source>Front. Plant Sci.</source> <volume>9</volume>, <elocation-id>16</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2018.00016</pub-id>, PMID: <pub-id pub-id-type="pmid">29403522</pub-id>
</mixed-citation>
</ref>
<ref id="B138">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Syu</surname> <given-names>J.-H.</given-names></name>
<name><surname>Lin</surname> <given-names>J. C.-W.</given-names></name>
<name><surname>Srivastava</surname> <given-names>G.</given-names></name>
<name><surname>Yu</surname> <given-names>K.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>A comprehensive survey&#xa0;on artificial intelligence empowered edge computing on consumer electronics</article-title>.&#xa0;<source>IEEE Trans. Consumer Electronics</source>. <volume>69</volume>, <fpage>1023</fpage>&#x2013;<lpage>1034</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TCE.2023.3318150</pub-id>
</mixed-citation>
</ref>
<ref id="B139">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Takahashi</surname> <given-names>H.</given-names></name>
<name><surname>Pradal</surname> <given-names>C.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Root phenotyping: important and minimum information required for root modeling in crop plants</article-title>. <source>Breed. Sci.</source> <volume>71</volume>, <fpage>109</fpage>&#x2013;<lpage>116</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1270/jsbbs.20126</pub-id>, PMID: <pub-id pub-id-type="pmid">33762880</pub-id>
</mixed-citation>
</ref>
<ref id="B140">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tanaka</surname> <given-names>T. S. T.</given-names></name>
<name><surname>Wang</surname> <given-names>S.</given-names></name>
<name><surname>J&#xf8;rgensen</surname> <given-names>J. R.</given-names></name>
<name><surname>Gentili</surname> <given-names>M.</given-names></name>
<name><surname>Vidal</surname> <given-names>A. Z.</given-names></name>
<name><surname>Mortensen</surname> <given-names>A. K.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Review of crop phenotyping in field plot experiments using UAV-mounted sensors and algorithms</article-title>. <source>Drones</source> <volume>8</volume>, <fpage>212</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/drones8060212</pub-id>
</mixed-citation>
</ref>
<ref id="B141">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Thapa</surname> <given-names>S.</given-names></name>
<name><surname>Zhu</surname> <given-names>F.</given-names></name>
<name><surname>Walia</surname> <given-names>H.</given-names></name>
<name><surname>Yu</surname> <given-names>H.</given-names></name>
<name><surname>Ge</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>A novel LiDAR-based instrument for high-throughput, 3D measurement of morphological traits in maize and sorghum</article-title>. <source>Sensors</source> <volume>18</volume>, <fpage>1187</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s18041187</pub-id>, PMID: <pub-id pub-id-type="pmid">29652788</pub-id>
</mixed-citation>
</ref>
<ref id="B142">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Vadez</surname> <given-names>V.</given-names></name>
<name><surname>Kholov&#xe1;</surname> <given-names>J.</given-names></name>
<name><surname>Hummel</surname> <given-names>G.</given-names></name>
<name><surname>Zhokhavets</surname> <given-names>U.</given-names></name>
<name><surname>Gupta</surname> <given-names>S. K.</given-names></name>
<name><surname>Hash</surname> <given-names>C. T.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>LeasyScan: a novel concept combining 3D imaging and lysimetry for high-throughput phenotyping of traits controlling plant water budget</article-title>. <source>J. Exp. Bot.</source> <volume>66</volume>, <fpage>5581</fpage>&#x2013;<lpage>5593</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/jxb/erv251</pub-id>, PMID: <pub-id pub-id-type="pmid">26034130</pub-id>
</mixed-citation>
</ref>
<ref id="B143">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Vandenberghe</surname> <given-names>B.</given-names></name>
<name><surname>Depuydt</surname> <given-names>S.</given-names></name>
<name><surname>Van Messem</surname> <given-names>A.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>How to make sense of 3D representations for plant phenotyping: a compendium of processing and analysis techniques</article-title>. <source>Plant Methods</source>. <volume>19</volume>, <fpage>60</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13007-023-01031-z</pub-id>, PMID: <pub-id pub-id-type="pmid">37353846</pub-id>
</mixed-citation>
</ref>
<ref id="B144">
<mixed-citation publication-type="other">
<person-group person-group-type="author">
<name><surname>Van Genechten</surname> <given-names>B.</given-names></name>
</person-group> (<year>2008</year>). 
<article-title>Theory and practice on Terrestrial Laser Scanning: Training material based on practical applications</article-title>.
</mixed-citation>
</ref>
<ref id="B145">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>V&#xe1;zquez-Arellano</surname> <given-names>M.</given-names></name>
<name><surname>Reiser</surname> <given-names>D.</given-names></name>
<name><surname>Paraforos</surname> <given-names>D. S.</given-names></name>
<name><surname>Garrido-Izard</surname> <given-names>M.</given-names></name>
<name><surname>Burce</surname> <given-names>M. E. C.</given-names></name>
<name><surname>Griepentrog</surname> <given-names>H. W.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>3-D reconstruction of maize plants using a time-of-flight camera</article-title>. <source>Comput. Electron. Agric.</source> <volume>145</volume>, <fpage>235</fpage>&#x2013;<lpage>247</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2018.01.002</pub-id>
</mixed-citation>
</ref>
<ref id="B146">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Virlet</surname> <given-names>N.</given-names></name>
<name><surname>Sabermanesh</surname> <given-names>K.</given-names></name>
<name><surname>Sadeghi-Tehran</surname> <given-names>P.</given-names></name>
<name><surname>Hawkesford</surname> <given-names>M. J.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Field Scanalyzer: An automated robotic field phenotyping platform for detailed crop monitoring</article-title>. <source>Funct. Plant Biol.</source> <volume>44</volume>, <fpage>143</fpage>&#x2013;<lpage>153</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1071/FP16163</pub-id>, PMID: <pub-id pub-id-type="pmid">32480553</pub-id>
</mixed-citation>
</ref>
<ref id="B147">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Von Luxburg</surname> <given-names>U.</given-names></name>
</person-group> (<year>2007</year>). 
<article-title>A tutorial on spectral clustering</article-title>. <source>Stat. Comput.</source> <volume>17</volume>, <fpage>395</fpage>&#x2013;<lpage>416</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11222-007-9033-z</pub-id>
</mixed-citation>
</ref>
<ref id="B148">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wan</surname> <given-names>Z.-R.</given-names></name>
<name><surname>Lai</surname> <given-names>L.-J.</given-names></name>
<name><surname>Mao</surname> <given-names>J.</given-names></name>
<name><surname>Zhu</surname> <given-names>L.-M.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Extraction and segmentation method of laser stripe in linear structured light scanner</article-title>. <source>Opt. Eng.</source> <volume>60</volume>, <fpage>046104</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1117/1.OE.60.4.046104</pub-id>
</mixed-citation>
</ref>
<ref id="B149">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>Y.</given-names></name>
<name><surname>Wen</surname> <given-names>W.</given-names></name>
<name><surname>Wu</surname> <given-names>S.</given-names></name>
<name><surname>Wang</surname> <given-names>C.</given-names></name>
<name><surname>Yu</surname> <given-names>Z.</given-names></name>
<name><surname>Guo</surname> <given-names>X.</given-names></name>
<etal/>
</person-group>. (<year>2018</year>). 
<article-title>Maize plant phenotyping: comparing 3D laser scanning, multi-view stereo reconstruction, and 3D digitizing estimates</article-title>. <source>Remote Sens.</source> <volume>11</volume>, <fpage>63</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs11010063</pub-id>
</mixed-citation>
</ref>
<ref id="B150">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>H.</given-names></name>
<name><surname>Zhu</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Optimal exposure selection of high dynamic range-based reflection compensation for printed circuit board reconstruction using structured light 3D measurement system</article-title>. <source>MIPPR 2023: Pattern Recognition Comput. Vision</source> <volume>13086</volume>, <fpage>77</fpage>&#x2013;<lpage>83</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1117/12.2692724</pub-id>
</mixed-citation>
</ref>
<ref id="B151">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wasaya</surname> <given-names>A.</given-names></name>
<name><surname>Zhang</surname> <given-names>X.</given-names></name>
<name><surname>Fang</surname> <given-names>Q.</given-names></name>
<name><surname>Yan</surname> <given-names>Z.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Root phenotyping for drought tolerance: a review</article-title>. <source>Agronomy</source> <volume>8</volume>, <fpage>241</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy8110241</pub-id>
</mixed-citation>
</ref>
<ref id="B152">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wu</surname> <given-names>J.</given-names></name>
<name><surname>Guo</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>An integrated method for quantifying root architecture of field-grown maize</article-title>. <source>Ann. Bot.</source> <volume>114</volume>, <fpage>841</fpage>&#x2013;<lpage>851</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/aob/mcu009</pub-id>, PMID: <pub-id pub-id-type="pmid">24532646</pub-id>
</mixed-citation>
</ref>
<ref id="B153">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wu</surname> <given-names>G.</given-names></name>
<name><surname>Masia</surname> <given-names>B.</given-names></name>
<name><surname>Jarabo</surname> <given-names>A.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Wang</surname> <given-names>L.</given-names></name>
<name><surname>Dai</surname> <given-names>Q.</given-names></name>
<etal/>
</person-group>. (<year>2017</year>). 
<article-title>Light field image processing: An overview</article-title>. <source>IEEE J. Selected Topics Signal Process.</source> <volume>11</volume>, <fpage>926</fpage>&#x2013;<lpage>954</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/JSTSP.2017.2747126</pub-id>
</mixed-citation>
</ref>
<ref id="B154">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wu</surname> <given-names>S.</given-names></name>
<name><surname>Wen</surname> <given-names>W.</given-names></name>
<name><surname>Wang</surname> <given-names>Y.</given-names></name>
<name><surname>Fan</surname> <given-names>J.</given-names></name>
<name><surname>Wang</surname> <given-names>C.</given-names></name>
<name><surname>Gou</surname> <given-names>W.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>MVS-Pheno: a portable and low-cost phenotyping platform for maize shoots using multiview stereo 3D reconstruction</article-title>. <source>Plant Phenomics</source> <volume>2020</volume>, <fpage>1</fpage>&#x2013;<lpage>17</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.34133/2020/1848437</pub-id>, PMID: <pub-id pub-id-type="pmid">33313542</pub-id>
</mixed-citation>
</ref>
<ref id="B155">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xiang</surname> <given-names>L.</given-names></name>
<name><surname>Gai</surname> <given-names>J.</given-names></name>
<name><surname>Bao</surname> <given-names>Y.</given-names></name>
<name><surname>Yu</surname> <given-names>J.</given-names></name>
<name><surname>Schnable</surname> <given-names>P. S.</given-names></name>
<name><surname>Tang</surname> <given-names>L.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Field-based robotic leaf angle detection and characterization of maize plants using stereo vision and deep convolutional neural networks</article-title>. <source>J. Field Robotics</source> <volume>40</volume>, <fpage>1034</fpage>&#x2013;<lpage>1053</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/rob.22166</pub-id>
</mixed-citation>
</ref>
<ref id="B156">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xu</surname> <given-names>W.</given-names></name>
<name><surname>Yang</surname> <given-names>Z.</given-names></name>
<name><surname>Ng</surname> <given-names>D. W. K.</given-names></name>
<name><surname>Levorato</surname> <given-names>M.</given-names></name>
<name><surname>Eldar</surname> <given-names>Y. C.</given-names></name>
<name><surname>Debbah</surname> <given-names>M.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Edge learning for B5G networks with distributed signal processing: Semantic communication, edge computing, and wireless sensing</article-title>. <source>IEEE J. Selected Topics Signal Process.</source> <volume>17</volume>, <fpage>9</fpage>&#x2013;<lpage>39</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/JSTSP.2023.3239189</pub-id>
</mixed-citation>
</ref>
<ref id="B157">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yang</surname> <given-names>M.</given-names></name>
<name><surname>Cho</surname> <given-names>S.-I.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>High-resolution 3D crop reconstruction and automatic analysis of phenotyping index using machine learning</article-title>. <source>Agriculture</source> <volume>11</volume>, <fpage>1010</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture11101010</pub-id>
</mixed-citation>
</ref>
<ref id="B158">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yang</surname> <given-names>D.</given-names></name>
<name><surname>Yang</surname> <given-names>H.</given-names></name>
<name><surname>Liu</surname> <given-names>D.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Research on automatic 3D reconstruction of plant phenotype based on Multi-View images</article-title>. <source>Comput. Electron. Agric.</source> <volume>220</volume>, <fpage>108866</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.108866</pub-id>
</mixed-citation>
</ref>
<ref id="B159">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Yoon</surname> <given-names>S.-C.</given-names></name>
<name><surname>Thai</surname> <given-names>C. N.</given-names></name>
</person-group> (<year>2010</year>). &#x201c;
<article-title>Stereo spectral imaging system for plant health characterization</article-title>,&#x201d; in <source>Technological developments in networking, education and automation</source> (<publisher-loc>Vukovar, Croatia</publisher-loc>: 
<publisher-name>InTech</publisher-name>), <fpage>181</fpage>&#x2013;<lpage>186</lpage>.
</mixed-citation>
</ref>
<ref id="B160">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Young</surname> <given-names>S. N.</given-names></name>
<name><surname>Kayacan</surname> <given-names>E.</given-names></name>
<name><surname>Peschel</surname> <given-names>J. M.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Design and field evaluation of a ground robot for high-throughput phenotyping of energy sorghum</article-title>. <source>Precis. Agric.</source> <volume>20</volume>, <fpage>697</fpage>&#x2013;<lpage>722</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11119-018-9601-6</pub-id>
</mixed-citation>
</ref>
<ref id="B161">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yuan</surname> <given-names>W.</given-names></name>
<name><surname>Li</surname> <given-names>J.</given-names></name>
<name><surname>Bhatta</surname> <given-names>M.</given-names></name>
<name><surname>Shi</surname> <given-names>Y.</given-names></name>
<name><surname>Baenziger</surname> <given-names>P. S.</given-names></name>
<name><surname>Ge</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Wheat height estimation using LiDAR in comparison to ultrasonic sensor and UAS</article-title>. <source>Sensors</source> <volume>18</volume>, <fpage>3731</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s18113731</pub-id>, PMID: <pub-id pub-id-type="pmid">30400154</pub-id>
</mixed-citation>
</ref>
<ref id="B162">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Zegada-Lizarazu</surname> <given-names>W.</given-names></name>
<name><surname>Wullschleger</surname> <given-names>S. D.</given-names></name>
<name><surname>Surendran Nair</surname> <given-names>S.</given-names></name>
<name><surname>Monti</surname> <given-names>A.</given-names></name>
</person-group> (<year>2012</year>). &#x201c;
<article-title>Crop physiology</article-title>,&#x201d; in <source>Switchgrass: A Valuable Biomass Crop for Energy</source> (<publisher-loc>London</publisher-loc>: 
<publisher-name>Springer-Verlag</publisher-name>), <fpage>55</fpage>&#x2013;<lpage>86</lpage>.
</mixed-citation>
</ref>
<ref id="B163">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zermas</surname> <given-names>D.</given-names></name>
<name><surname>Morellas</surname> <given-names>V.</given-names></name>
<name><surname>Mulla</surname> <given-names>D.</given-names></name>
<name><surname>Papanikolopoulos</surname> <given-names>N.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>3D model processing for high throughput phenotype extraction&#x2013;the case of corn</article-title>. <source>Comput. Electron. Agric.</source> <volume>172</volume>, <fpage>105047</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2019.105047</pub-id>
</mixed-citation>
</ref>
<ref id="B164">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>S.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>High-speed 3D shape measurement with structured light&#xa0;methods:&#xa0;A review</article-title>. <source>Optics Lasers Eng.</source> <volume>106</volume>, <fpage>119</fpage>&#x2013;<lpage>131</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.optlaseng.2018.02.017</pub-id>
</mixed-citation>
</ref>
<ref id="B165">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>M.</given-names></name>
<name><surname>Li</surname> <given-names>W.</given-names></name>
<name><surname>Tao</surname> <given-names>R.</given-names></name>
<name><surname>Li</surname> <given-names>H.</given-names></name>
<name><surname>Du</surname> <given-names>Q.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Information fusion for classification of hyperspectral and LiDAR data using IP-CNN</article-title>. <source>IEEE Trans. Geosci. Remote Sens.</source> <volume>60</volume>, <fpage>1</fpage>&#x2013;<lpage>12</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TGRS.2021.3135010</pub-id>
</mixed-citation>
</ref>
<ref id="B166">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Zhang</surname> <given-names>N.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Imaging technologies for plant high-throughput phenotyping: A review</article-title>. <source>Front. Agric. Sci. Eng.</source> <volume>5</volume>, <fpage>406</fpage>&#x2013;<lpage>419</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.15302/J-FASE-2018242</pub-id>
</mixed-citation>
</ref>
<ref id="B167">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhou</surname> <given-names>J.</given-names></name>
<name><surname>Pan</surname> <given-names>L.</given-names></name>
<name><surname>Li</surname> <given-names>Y.</given-names></name>
<name><surname>Du</surname> <given-names>R.</given-names></name>
<name><surname>Zhang</surname> <given-names>F.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Data-driven laser plane optimization for high-precision numerical calibration of line structured light sensors</article-title>. <source>IEEE Access</source> <volume>9</volume>, <fpage>57404</fpage>&#x2013;<lpage>57413</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2021.3072662</pub-id>
</mixed-citation>
</ref>
<ref id="B168">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhu</surname> <given-names>Y.</given-names></name>
<name><surname>Sun</surname> <given-names>G.</given-names></name>
<name><surname>Ding</surname> <given-names>G.</given-names></name>
<name><surname>Zhou</surname> <given-names>J.</given-names></name>
<name><surname>Wen</surname> <given-names>M.</given-names></name>
<name><surname>Jin</surname> <given-names>S.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). 
<article-title>Large-scale field phenotyping using backpack LiDAR and CropQuant-3D to measure structural variation in wheat</article-title>. <source>Plant Physiol.</source> <volume>187</volume>, <fpage>716</fpage>&#x2013;<lpage>738</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/plphys/kiab324</pub-id>, PMID: <pub-id pub-id-type="pmid">34608970</pub-id>
</mixed-citation>
</ref>
<ref id="B169">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhu</surname> <given-names>B.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Sun</surname> <given-names>Y.</given-names></name>
<name><surname>Shi</surname> <given-names>Y.</given-names></name>
<name><surname>Ma</surname> <given-names>Y.</given-names></name>
<name><surname>Guo</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Quantitative estimation of organ-scale phenotypic parameters of field crops through 3D modeling using extremely low altitude UAV images</article-title>. <source>Comput. Electron. Agric.</source> <volume>210</volume>, <fpage>107910</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.107910</pub-id>
</mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn id="n1" fn-type="custom" custom-type="edited-by">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/172404">Alejandro Isabel Luna-Maldonado</ext-link>, Autonomous University of Nuevo Le&#xf3;n, Mexico</p></fn>
<fn id="n2" fn-type="custom" custom-type="reviewed-by">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2556211">Wenyu Zhang</ext-link>, Jiangsu Academy of Agricultural Sciences Wuxi Branch, China</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2960497">Jiale Feng</ext-link>, Northernvue Corporation, United States</p></fn>
</fn-group>
</back>
</article>