<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2025.1488760</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Plant Science</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Plant height measurement using UAV-based aerial RGB and LiDAR images in soybean</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>Pun Magar</surname>
<given-names>Lalit</given-names>
</name>
<uri xlink:href="https://loop.frontiersin.org/people/2830860"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Sandifer</surname>
<given-names>Jeremy</given-names>
</name>
<uri xlink:href="https://loop.frontiersin.org/people/2939768"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Khatri</surname>
<given-names>Deepak</given-names>
</name>
<uri xlink:href="https://loop.frontiersin.org/people/2857211"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Poudel</surname>
<given-names>Sudip</given-names>
</name>
<uri xlink:href="https://loop.frontiersin.org/people/2929258"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>KC</surname>
<given-names>Suraj</given-names>
</name>
<uri xlink:href="https://loop.frontiersin.org/people/2830881"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Gyawali</surname>
<given-names>Buddhi</given-names>
</name>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Gebremedhin</surname>
<given-names>Maheteme</given-names>
</name>
<uri xlink:href="https://loop.frontiersin.org/people/1574841"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Chiluwal</surname>
<given-names>Anuj</given-names>
</name>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1119419"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
</contrib>
</contrib-group>
<aff id="aff1">
<institution>College of Agriculture, Health, and Natural Resources, Kentucky State University</institution>, <addr-line>Frankfort, KY</addr-line>, <country>United States</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited by: Zhenghong Yu, Guangdong Polytechnic of Science and Technology, China</p>
</fn>
<fn fn-type="edited-by">
<p>Reviewed by: Yikun Huang, Fujian Normal University, China</p>
<p>Zejun Zhang, Zhejiang Normal University, China</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Anuj Chiluwal, <email xlink:href="mailto:anuj.chiluwal@kysu.edu">anuj.chiluwal@kysu.edu</email>
</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>30</day>
<month>01</month>
<year>2025</year>
</pub-date>
<pub-date pub-type="collection">
<year>2025</year>
</pub-date>
<volume>16</volume>
<elocation-id>1488760</elocation-id>
<history>
<date date-type="received">
<day>30</day>
<month>08</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>13</day>
<month>01</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2025 Pun Magar, Sandifer, Khatri, Poudel, KC, Gyawali, Gebremedhin and Chiluwal</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Pun Magar, Sandifer, Khatri, Poudel, KC, Gyawali, Gebremedhin and Chiluwal</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>Phenotypic traits like plant height are crucial in assessing plant growth and physiological performance. Manual plant height measurement is labor and time-intensive, low throughput, and error-prone. Hence, aerial phenotyping using aerial imagery-based sensors combined with image processing technique is quickly emerging as a more effective alternative to estimate plant height and other morphophysiological parameters. Studies have demonstrated the effectiveness of both RGB and LiDAR images in estimating plant height in several crops. However, there is limited information on their comparison, especially in soybean (<italic>Glycine max</italic> [L.] Merr.). As a result, there is not enough information to decide on the appropriate sensor for plant height estimation in soybean. Hence, the study was conducted to identify the most effective sensor for high throughput aerial phenotyping to estimate plant height in soybean. Aerial images were collected in a field experiment at multiple time points during soybean growing season using an Unmanned Aerial Vehicle (UAV or drone) equipped with RGB and LiDAR sensors. Our method established the relationship between manually measured plant height and the height obtained from aerial platforms. We found that the LiDAR sensor had a better performance (R<sup>2</sup> = 0.83) than the RGB camera (R<sup>2</sup> = 0.53) when compared with ground reference height during pod growth and seed filling stages. However, RGB showed more reliability in estimating plant height at physiological maturity when the LiDAR could not capture an accurate plant height measurement. The results from this study contribute to identifying ideal aerial phenotyping sensors to estimate plant height in soybean during different growth stages.</p>
</abstract>
<kwd-group>
<kwd>soybean</kwd>
<kwd>plant height</kwd>
<kwd>high throughput aerial phenotyping</kwd>
<kwd>unmanned aerial vehicles</kwd>
<kwd>RGB</kwd>
<kwd>lidar</kwd>
</kwd-group>
<counts>
<fig-count count="10"/>
<table-count count="6"/>
<equation-count count="3"/>
<ref-count count="102"/>
<page-count count="16"/>
<word-count count="7387"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-in-acceptance</meta-name>
<meta-value>Technical Advances in Plant Science</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Soybean (<italic>Glycine max</italic> (L.) Merrill) is a vital source of oil and plant protein globally, recognized for its high nutritional value (<xref ref-type="bibr" rid="B88">Wilcox, 2016</xref>). In the U.S., it ranks as the second most cultivated crop, following corn (<xref ref-type="bibr" rid="B81">Vaiknoras and Hubbs, 2023</xref>), and it plays a significant role in agricultural exports, with the U.S. being the second-largest soybean exporter, accounting for 38% of global soybean trade. To meet the growing global demand and maintain its status as a top exporter, the U.S. must significantly enhance soybean yield. Like other crops, yield in soybean is significantly influenced by a complex interaction of genetic traits, environmental factors, and agricultural practices. Key yield-related traits like pod number and seeds per pod (<xref ref-type="bibr" rid="B55">Ning et&#xa0;al., 2018</xref>), seed size (<xref ref-type="bibr" rid="B44">Liu et&#xa0;al., 2011</xref>), plant architecture like plant height (<xref ref-type="bibr" rid="B37">Jin et&#xa0;al., 2010</xref>), phenology (<xref ref-type="bibr" rid="B40">Kantolic and Slafer, 2001</xref>), photosynthetic efficiency (<xref ref-type="bibr" rid="B85">Wang et&#xa0;al., 2023</xref>), reproductive efficiency (<xref ref-type="bibr" rid="B77">Tischner et&#xa0;al., 2003</xref>) and nitrogen fixation efficiency (<xref ref-type="bibr" rid="B34">Imsande, 1992</xref>) are critical to influence final yield. Among these traits, plant height (PH) is one of the main critical yield-related traits in soybean, impacting the crop&#x2019;s ability to compete for light and, consequently, its overall productivity (<xref ref-type="bibr" rid="B25">Gaw&#x119;da et&#xa0;al., 2020</xref>). 
Defined as the distance from the ground to the top of the primary photosynthetic tissue (<xref ref-type="bibr" rid="B13">Cornelissen et&#xa0;al., 2003</xref>), PH impacts essential factors such as biomass (<xref ref-type="bibr" rid="B3">Bendig et&#xa0;al., 2014</xref>; <xref ref-type="bibr" rid="B74">Tilly et&#xa0;al., 2015</xref>; <xref ref-type="bibr" rid="B8">Brocks and Bareth, 2018</xref>), crop yield (<xref ref-type="bibr" rid="B93">Yin et&#xa0;al., 2011</xref>; <xref ref-type="bibr" rid="B66">Sharma et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B98">Zhang et&#xa0;al., 2017</xref>), and soil nutrient availability (<xref ref-type="bibr" rid="B92">Yin and McClure, 2013</xref>). This makes it a pivotal trait in plant breeding and crop improvement programs. Traditionally, measuring PH involves using rulers in the field, a method that is labor-intensive, time-consuming, and susceptible to errors, especially over extensive areas. These manual measurement techniques also suffer from spatial and temporal limitations that can compromise the accuracy of this vital plant phenotype data. To address these challenges, non-destructive image-based phenotyping has become increasingly popular, providing a more efficient and accurate means to assess PH.</p>
<p>Advancements in remote sensing have led to the exploration of various sensor-based methods for effective PH assessment. Passive sensors like satellites have been explored to measure PH in forests (<xref ref-type="bibr" rid="B60">Petrou et&#xa0;al., 2012</xref>) and crops like corn (<xref ref-type="bibr" rid="B23">Gao et&#xa0;al., 2013</xref>) and rice (<xref ref-type="bibr" rid="B19">Erten et&#xa0;al., 2016</xref>). Cloud cover and the revisit time of satellites (<xref ref-type="bibr" rid="B96">Zhang et&#xa0;al., 2020</xref>) can limit their effectiveness in precision agriculture. In response to these limitations, recent decades have seen a shift towards proximal field phenotyping technologies. Devices such as ultrasonic sensors, RGB depth cameras, and Terrestrial laser scanners fitted in fixed platforms, tractors, or autonomous robots have become prominent for high throughput field phenotyping. These technologies have proven successful in various crops, including cotton (<xref ref-type="bibr" rid="B35">Jiang et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B69">Sun et&#xa0;al., 2017</xref>, <xref ref-type="bibr" rid="B70">2018</xref>; <xref ref-type="bibr" rid="B73">Thompson et&#xa0;al., 2019</xref>), corn (<xref ref-type="bibr" rid="B30">H&#xe4;mmerle and H&#xf6;fle, 2016</xref>; <xref ref-type="bibr" rid="B62">Qiu et&#xa0;al., 2019</xref>), and soybean (<xref ref-type="bibr" rid="B49">Ma et&#xa0;al., 2019</xref>). However, these proximal sensor platforms face several challenges including high cost, limited area coverage, and reduced mobility as the crops reach advanced stages of growth (<xref ref-type="bibr" rid="B16">Deery et&#xa0;al., 2014</xref>).</p>
<p>To further enhance the scope and efficiency of phenotyping, high throughput aerial phenotyping (HTAP) using unmanned aerial vehicles (UAV) like drones has gained popularity. UAVs, equipped with various sensors provide rapid, extensive data collection capabilities. Among these, RGB-based photogrammetry has become a popular technique for estimating PH across different crop species, including cotton (<xref ref-type="bibr" rid="B89">Xu et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B91">Ye et&#xa0;al., 2023</xref>), wheat (<xref ref-type="bibr" rid="B50">Madec et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B41">Khan et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B94">Yuan et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B82">Volpato et&#xa0;al., 2021</xref>), maize (<xref ref-type="bibr" rid="B31">Han et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B52">Malambo et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B68">Su et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B24">Gao et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B46">Liu et&#xa0;al., 2024</xref>) and sorghum (<xref ref-type="bibr" rid="B86">Watanabe et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B79">Tunca et&#xa0;al., 2024</xref>). UAV-based RGB cameras are popularly used to estimate PH using the structure from motion (SfM) technique (<xref ref-type="bibr" rid="B39">Kalacska et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B12">Coops et&#xa0;al., 2021</xref>). The PH estimation techniques using RGB cameras are considered a low-cost and user-friendly approach (<xref ref-type="bibr" rid="B43">Li et&#xa0;al., 2019</xref>). However, some studies argued that the derived canopy height from the SfM technique showed some issues in height measurement (<xref ref-type="bibr" rid="B14">Cunliffe et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B87">Wijesingha et&#xa0;al., 2019</xref>). 
The overestimation of the digital surface model (DSM) by the RGB camera is attributed to its inability to penetrate the canopy and give precise information (<xref ref-type="bibr" rid="B50">Madec et&#xa0;al., 2017</xref>). Hence, the LiDAR technique is more popular for vertical structure measurement as its pulses have powerful penetration capacity (<xref ref-type="bibr" rid="B42">Lefsky et&#xa0;al., 2002</xref>). LiDAR is particularly noted for its capacity to provide detailed 3D structural information by penetrating dense canopies and differentiating between ground and non-ground points using multiple reflections of laser pulses (<xref ref-type="bibr" rid="B9">Calders et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B12">Coops et&#xa0;al., 2021</xref>). This technology has effectively estimated canopy height in forests, shrubs, and various crops (<xref ref-type="bibr" rid="B45">Liu et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B99">Zhao et&#xa0;al., 2022</xref>). Additionally, this technology has successfully predicted PH in many crops like cotton (<xref ref-type="bibr" rid="B69">Sun et&#xa0;al., 2017</xref>, <xref ref-type="bibr" rid="B70">2018</xref>; <xref ref-type="bibr" rid="B73">Thompson et&#xa0;al., 2019</xref>), wheat (<xref ref-type="bibr" rid="B50">Madec et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B94">Yuan et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B29">Guo et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B72">ten Harkel et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B5">Blanquart et&#xa0;al., 2020</xref>), maize (<xref ref-type="bibr" rid="B1">And&#xfa;jar et&#xa0;al., 2013</xref>; <xref ref-type="bibr" rid="B100">Zhou et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B24">Gao et&#xa0;al., 2022</xref>), sorghum (<xref ref-type="bibr" rid="B32">Hu et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B84">Wang et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B83">Waliman and Zakhor, 2020</xref>; <xref 
ref-type="bibr" rid="B59">Patel et&#xa0;al., 2023</xref>) and rice (<xref ref-type="bibr" rid="B75">Tilly et&#xa0;al., 2014a</xref>; <xref ref-type="bibr" rid="B61">Phan and Takahashi, 2021</xref>; <xref ref-type="bibr" rid="B71">Sun et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B38">Jing et&#xa0;al., 2023</xref>).</p>
<p>Despite these advancements, there remains a gap in comprehensive UAV-based HTAP studies specifically for estimating soybean height. Previous studies using an imaging system of RGB camera and photonic mixer detector (PMD) have provided valuable insights into PH in a controlled setting (<xref ref-type="bibr" rid="B28">Guan et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B49">Ma et&#xa0;al., 2019</xref>). Structure from motion (SFM) techniques yield PH that helps spot ideotype in soybeans (<xref ref-type="bibr" rid="B64">Roth et&#xa0;al., 2022</xref>). Canopy height and their temporal changes across the growing season were recorded using RGB imagery captured with a drone in different soybean cultivars (<xref ref-type="bibr" rid="B6">Borra-Serrano et&#xa0;al., 2020</xref>). Reliable information about soybean height was found when using a low-cost depth camera mounted on a ground-based phenomics platform (<xref ref-type="bibr" rid="B53">Morrison et&#xa0;al., 2021</xref>). This study further verified the PH information using information recorded in the field manually and using single-point LiDAR (SPL) with high precision, assuring the ability of LiDAR to perform precise PH estimation in soybeans. Similarly, <xref ref-type="bibr" rid="B48">Luo et&#xa0;al. (2021)</xref> recorded data using UAV-based LiDAR and explored the potential of UAV-based LiDAR sensors to estimate soybean height. To our knowledge, no other studies have used UAV-LiDAR multiple times to measure PH in soybeans. Furthermore, none of the previous studies compared the effectiveness of UAV-based LiDAR and RGB for PH estimation in soybean. As a result, there is not yet clear information regarding which aerial phenotyping sensors and timing are ideal for assessing PH in soybean. Hence, in this study, we used UAV-based RGB cameras and LiDAR sensors in the same field experiment to estimate soybean plant height across different periods. 
This study aims to evaluate the potential of UAV-based RGB and LiDAR sensors for accurately estimating soybean height at different growth and developmental stages. Hence, the objectives of this study are to assess the uncertainty in estimating soybean height with UAV-based RGB and LiDAR sensors and to identify the best high throughput aerial phenotyping sensor for PH estimation in soybean.</p>
</sec>
<sec id="s2" sec-type="materials|methods">
<label>2</label>
<title>Materials and methods</title>
<sec id="s2_1">
<label>2.1</label>
<title>Experimental design</title>
<p>A field experiment was conducted in the 2023 soybean growing season at Kentucky State University&#x2019;s Harold R. Benson Research and Demonstration Farm (38&#xb0;7&#x2032; N; 84&#xb0;53&#x2032; W; 207 masl). The experiment was set up as a Split-Split-Plot Randomized Complete Block Design with four replications (<xref ref-type="fig" rid="f1">
<bold>Figure&#xa0;1</bold>
</xref>). The main plot was biochar application: no application or biochar application at 12 tons/ha before planting. Four soybean genotypes (two commercial cultivars - PB2623, PB423, and two advanced breeding non-nodulating soybean lines - KS4120NSGT and KS4120NSGT_NN_NIL-268) were used in the experiment as the subplot. Similarly, sub-subplots were four different levels of late-season N fertilization: 0, 40, 80, and 120 kg N ha<sup>-1</sup>. There were 88 plots, each measuring 7.32 m (24 ft) long and 1.83 m (6 ft) wide, with a 90 cm (3 ft) alley separating the plots. Each plot had five rows spaced 38 cm (15 inches) apart. Urea was used as N fertilizer, which was equally split into 3 doses at R5, 1 week after R5, and 2 weeks after R5. Soybean was planted in mid-May and harvested in the last week of September.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Study area location and experimental design: <bold>(A)</bold> Experimental area location and <bold>(B)</bold> Experimental design in the field.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1488760-g001.tif"/>
</fig>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>Data acquisition</title>
<sec id="s2_2_1">
<label>2.2.1</label>
<title>UAV data</title>
<p>The UAV-DJI Mavic 3M (DJI Technology Co., Ltd., Shenzhen, China), fitted with RGB and multispectral (MS) sensors, was employed to capture aerial images of crops. The imaging sensor used was a 1/2.8-inch CMOS with a 25 mm focal length, capturing images at a resolution of 5280 x 3956 pixels. Drone Deploy was utilized to identify the target area for aerial photography on a satellite map and to plan the flight route by entering the necessary flight and camera parameters. Additionally, the DJI Matrice 300 fitted with a Zenmuse L1 LiDAR sensor was utilized to gather aerial LiDAR data. <xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref> presents the detailed parameters of the drones used in the study. These aerial operations were conducted around solar noon (between 10 AM and 2 PM) for optimal lighting conditions, reduced shadows, and uniform illumination. The UAV maintained a consistent altitude of 150 feet above ground level and a flight speed of 5 miles per hour throughout the aerial data collection period.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Detailed parameter settings of the unmanned aerial vehicles.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Parameter</th>
<th valign="top" align="left">DJI Mavic 3M</th>
<th valign="top" align="left">DJI Matrice 300</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Relative flight altitude</td>
<td valign="top" align="left">150 feet</td>
<td valign="top" align="left">150 feet</td>
</tr>
<tr>
<td valign="top" align="left">Flight speed</td>
<td valign="top" align="left">5 miles per hour</td>
<td valign="top" align="left">5 miles per hour</td>
</tr>
<tr>
<td valign="top" align="left">Forward overlap rate</td>
<td valign="top" align="left">85%</td>
<td valign="top" align="left">85%</td>
</tr>
<tr>
<td valign="top" align="left">Side overlap rate</td>
<td valign="top" align="left">85%</td>
<td valign="top" align="left">85%</td>
</tr>
<tr>
<td valign="top" align="left">Sensor</td>
<td valign="top" align="left">Multispectral and RGB</td>
<td valign="top" align="left">L1 LiDAR sensor</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Data collection with the UAV-LiDAR and RGB images occurred on June 6, July 7, July 18, and August 29. The first flight generated the reference ground to compute the digital terrain model (DTM). The rest of the dates correspond to critical growth stages of the crop, providing essential data for monitoring its development.</p>
<p>Six ground control points (GCPs) were set up and evenly distributed in the field, with three GCPs on each side. R12 Trimble (Trimble Inc., Sunnyvale, California) recorded the positions of the GCPs and checkpoints.</p>
</sec>
<sec id="s2_2_2">
<label>2.2.2</label>
<title>Field data</title>
<p>In the field, the PH was measured manually from the base to the tip of the main stem to serve as the ground reference height. Within each plot, three sampling locations were randomly chosen, each consisting of one plant from the middle three rows to reduce border effects. This sampling approach ensures sufficient representation of plot-level crop height (<xref ref-type="bibr" rid="B17">Dhami et&#xa0;al., 2020</xref>). The height of these plants was measured using a 1-meter ruler and the results were documented in a field notebook. Height measurements were taken on July 7, July 18, and August 29, aligning with key developmental stages of the soybean: R3 (beginning of pod development), R5 (onset of seed filling), and R7 (start of physiological maturity of the pod). <xref ref-type="fig" rid="f2">
<bold>Figure&#xa0;2</bold>
</xref> shows the further steps carried out after the acquisition of the aerial data and field data.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Workflow of the study showing data acquisition, data processing, and data analysis.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1488760-g002.tif"/>
</fig>
</sec>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>UAV data processing</title>
<p>In the Pix4Dmapper (Pix4D, Lausanne, Switzerland), the captured RGB images were geometrically corrected through orthorectification and stitched together using mosaicking techniques. Orthorectification ensures spatial consistency by correcting geometric distortions caused by terrain variation and camera orientation (<xref ref-type="bibr" rid="B78">Toutin, 2004</xref>). Similarly, high overlap rates help to provide repetition in image alignment and reduce gaps and mismatch in processed images (<xref ref-type="bibr" rid="B80">Turner et&#xa0;al., 2012</xref>; <xref ref-type="bibr" rid="B11">Colomina and Molina, 2014</xref>). These two important steps were instrumental in minimizing splicing errors during the mosaicking process. These processes were carried out in Pix4Dmapper, recognized for its precision in photogrammetric techniques (<xref ref-type="bibr" rid="B26">Gon&#xe7;alves and Henriques, 2015</xref>). Ultimately, the digital surface model (DSM) and orthophotos were then generated using the software&#x2019;s structure from motion (SfM) techniques.</p>
<p>The raw LiDAR point clouds collected by the UAV were uploaded to DJI Terra software (DJI Technology Co., Ltd., Shenzhen, China), where noise filtering was conducted. The data were formatted into LAS files with specified output coordinates. These LAS files were further post-processed in R studio using the lidR package, which involved setting up scan angle, ground classification, and normalizing elevations to produce the final Digital terrain model (DTM) and DSM.</p>
<p>The generated DTM and DSM were imported into ArcGIS Pro (Esri Inc., Redlands, United States). Here, each image was georeferenced using the georeferencing tool to add control points, utilizing the x and y locations of 6 ground control points (GCPs) and 14 checkpoints. Subsequent elevation adjustments were made to ensure accuracy in the final geographic positioning and elevation data.</p>
</sec>
<sec id="s2_4">
<label>2.4</label>
<title>Establishment of ground or terrain reference</title>
<p>We used the earliest date of LiDAR data we had, classified ground points, filtered for the ground class only, and used these to represent the bare earth surface. We then used the ground control points (or GCPs) to establish the adjustment needed to offset the generated DTM elevation to match the actual elevation recorded by the GCPs. We had several GCPs on bare or near-bare earth to measure the deviation between these points and the actual GCPs, thereby having the adjustment needed to match the elevations, all the while ensuring adherence to a projected KY StatePlane coordinate reference system.</p>
<p>The adjustment performed is primarily to correct an error introduced by, first, using different takeoff locations (i.e., different elevations) and, second, the proclivities of the GPS-equipped devices to slightly misjudge the vertical elevation of the takeoff locations (e.g., 234.5 msl versus 245.1 msl recorded at the same spot) on any given day. While the global baselines are impacted, the relative elevation variation in our image is not affected before normalization. The corrected images were further georeferenced, and elevation adjustment was performed to align our images to accurate terrain before generating plot-level data.</p>
<p>In the case of RGB images, the earliest date from the RGB camera was used to generate a bare earth surface using the photogrammetry technique in the Pix4Dmapper. After generating terrain in the software, georeferencing and elevation adjustment were conducted in the ArcGIS Pro software.</p>
</sec>
<sec id="s2_5">
<label>2.5</label>
<title>Determination of optimum LiDAR scan angle and elevation adjustment</title>
<p>The LiDAR scan angle, which is the angle at which laser pulses are directed toward the ground, plays a crucial role in generating a precise DTM using a LiDAR sensor. The study focused on minimizing the scan angle as angles closer to zero tend to yield more accurate elevation data (<xref ref-type="bibr" rid="B18">Ehlert and Heisig, 2013</xref>). To fine-tune our methodology, we conducted several trials to determine the most effective scan angle ranges. We aimed to select angles close to zero that still provided precise elevation information to generate an accurate DTM.</p>
<p>Terrain irregularities and slope variations can significantly affect the accuracy of height measurements from aerial data (<xref ref-type="bibr" rid="B67">Smith et&#xa0;al., 2019</xref>). As our study field had irregular terrain, elevation adjustment was carried out. To adjust the elevation values, buffers were created around adjustment points in the field. The mean elevation value of the buffers was computed, and the offset values were generated, which were further used to adjust the DTM and DSM raster. <xref ref-type="fig" rid="f3">
<bold>Figure&#xa0;3</bold>
</xref> illustrates the distribution in mean elevation values before adjustment and corrected mean elevation values after adjustment across various GCPs and checkpoints used as adjustment IDs.</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>Distribution of mean elevation values across different GCPs and checkpoints: <bold>(A)</bold> before adjustment (RGB images); <bold>(B)</bold> after adjustment (RGB images); <bold>(C)</bold> before adjustment (LiDAR images); <bold>(D)</bold> after adjustment (LiDAR images).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1488760-g003.tif"/>
</fig>
<p>This step ensures that all rasters accurately represent the terrain by aligning them with actual elevation data. This process focuses on ensuring the elevation data within the rasters was precisely adjusted, providing an accurate base for further analysis, like creating a CHM from adjusted DTM and DSM.</p>
</sec>
<sec id="s2_6">
<label>2.6</label>
<title>DTM, DSM, and CHM extraction</title>
<p>For this study, we initially generated a bare-ground Digital Terrain Model (DTM) by employing photogrammetry techniques to process RGB imagery captured during the early growth phase on June 6. Following this, Digital Surface Models (DSM) were created using RGB imageries acquired from UAV flights during reproductive stages. We integrated and spatially aligned the DTM and DSMs using Ground Control Points (GCPs) within ArcGIS Pro.</p>
<p>For LiDAR point clouds, the DTM was constructed by isolating ground points from the dense point cloud data and interpolating between these points to form a continuous ground elevation model. As outlined by <xref ref-type="bibr" rid="B20">Evans and Hudak (2007)</xref>, the multiscale curvature classification algorithm facilitated the differentiation of ground and non-ground points. These points were further refined using a Triangulated Irregular Network (TIN) algorithm to produce the DTM for the LiDAR data recorded during the initial crop growth stage. Finally, the &#x2018;pixel metrics&#x2019; function from the lidR package was used to compute the DTM value from the LiDAR point cloud.</p>
<p>The DSM for subsequent dates utilized the same function (pixel metrics) to calculate each pixel&#x2019;s minimum, maximum, and difference. Typically, the maximum value represents the DSM and was utilized to compute CHM after georeferencing and elevation adjustment in ArcGIS Pro. We computed the CHM of the soybean crop in ArcGIS Pro using the raster calculator tool, which involved subtracting the DTM from the DSM at the pixel level. We further refined the CHM at the plot level using the &#x2018;extract by mask&#x2019; tool, integrating this with the plot shapefile feature class containing 88 identical plot shapes. Then, plot-level statistics were derived using the &#x2018;zonal statistics as a table&#x2019; as a spatial analyst tool. Through this methodical approach, we efficiently generated CHM data from both RGB images and LiDAR point cloud data in ArcGIS Pro.</p>
</sec>
<sec id="s2_7">
<label>2.7</label>
<title>Statistical analysis</title>
<p>The PH data derived from RGB, LiDAR, and manual measurements were analyzed using simple linear regression. In the simple linear regression model, manually measured PH was considered the dependent variable and sensor-based PH was used as an explanatory variable. We validated the soybean heights estimated from RGB and LiDAR against the manually measured PH. To evaluate the accuracy of these estimations, we calculated the coefficient of determination (R<sup>2</sup>), root mean square error (RMSE), and mean absolute error (MAE), which were computed to see the accuracy of LiDAR and RGB for estimating PH. The R<sup>2</sup> value assessed how closely the estimated values aligned with the measured values with higher R<sup>2</sup> values indicating a better fit. Conversely, lower RMSE and MAE values suggested greater accuracy in the estimates, quantifying the difference between the estimated and actual values. The formulas for calculating R<sup>2</sup>, RMSE, and MAE are provided to ensure a clear understanding of how these metrics are derived and interpreted.</p>
<disp-formula>
<mml:math display="block" id="M1">
<mml:mrow>
<mml:msup>
<mml:mi>R</mml:mi>
<mml:mn>2</mml:mn>
</mml:msup>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
<mml:mo>&#x2212;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:msubsup>
<mml:msup>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:mover accent="true">
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="true">^</mml:mo>
</mml:mover>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
<mml:mrow>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:msubsup>
<mml:msup>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mover accent="true">
<mml:mi>y</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>&#xa0;</mml:mo>
</mml:mrow>
</mml:msub>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula>
<mml:math display="block" id="M2">
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mi>M</mml:mi>
<mml:mi>S</mml:mi>
<mml:mi>E</mml:mi>
<mml:mo>=</mml:mo>
<mml:msqrt>
<mml:mrow>
<mml:mfrac>
<mml:mrow>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:msubsup>
<mml:msup>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:mover accent="true">
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="true">^</mml:mo>
</mml:mover>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:mfrac>
</mml:mrow>
</mml:msqrt>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula>
<mml:math display="block" id="M3">
<mml:mrow>
<mml:mi>M</mml:mi>
<mml:mi>A</mml:mi>
<mml:mi>E</mml:mi>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mn>1</mml:mn>
<mml:mi>N</mml:mi>
</mml:mfrac>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:msubsup>
<mml:mo>|</mml:mo>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:mover accent="true">
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="true">^</mml:mo>
</mml:mover>
<mml:mo>|</mml:mo>
</mml:mrow>
</mml:math>
</disp-formula>
<p>Where N is the number of samples, <inline-formula>
<mml:math display="inline" id="im1">
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> and <inline-formula>
<mml:math display="inline" id="im2">
<mml:mrow>
<mml:mover accent="true">
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="true">^</mml:mo>
</mml:mover>
</mml:mrow>
</mml:math>
</inline-formula> are the measured and estimated PH, <inline-formula>
<mml:math display="inline" id="im3">
<mml:mrow>
<mml:mo>|</mml:mo>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:mover accent="true">
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="true">^</mml:mo>
</mml:mover>
<mml:mo>|</mml:mo>
</mml:mrow>
</mml:math>
</inline-formula> is the absolute error for the <inline-formula>
<mml:math display="inline" id="im4">
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>t</mml:mi>
<mml:mi>h</mml:mi>
</mml:mrow>
</mml:math>
</inline-formula> data, respectively <inline-formula>
<mml:math display="inline" id="im5">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>y</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>&#xa0;</mml:mo>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> is the average measured PH.</p>
</sec>
<sec id="s2_8">
<label>2.8</label>
<title>Modeling calibration and validation</title>
<p>Initially we assessed the individual sensor performance using a simple linear regression method. Later, we employed a variety of regression models, including multiple linear regression (MLR), partial least square regression (PLSR), random forest (RF), and Gaussian process regression (GPR) by integrating both RGB and LiDAR dataset to enhance predictive accuracy by leveraging the strength of both datasets. The dataset was split into training (80%) and testing (20%) sets using random partition method. The splitting was performed at plot level, so the training plots did not include any measurements from test plots. The training set was used to train the models, and the testing set was used to evaluate the performance metrics. The MLR and PLSR models were implemented using linear techniques, while RF and GPR were used for non-linear predictions. Each model was trained on the training dataset using the &#x2018;caret&#x2019; package in R Studio. The models&#x2019; performance was evaluated using three metrics: R<sup>2</sup>, RMSE, and MAE. 5-fold cross-validation was performed to assess the generalizability of the models. The parameter configuration for each model is clearly explained in <xref ref-type="table" rid="T2">
<bold>Table&#xa0;2</bold>
</xref>.</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Parameters and description of various regression models.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Model</th>
<th valign="top" align="left">Parameter</th>
<th valign="top" align="left">Description</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Multiple Linear Regression (MLR)</td>
<td valign="top" align="left">Predictors</td>
<td valign="top" align="left">RGB, LiDAR</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Coefficient</td>
<td valign="top" align="left">Automatically estimated using ordinary least squares (OLS)</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Assumptions</td>
<td valign="top" align="left">Linearity, independence, homoscedasticity, and normality of residuals</td>
</tr>
<tr>
<td valign="top" align="left">Partial Least Square Regression (PLSR)</td>
<td valign="top" align="left">Number of components</td>
<td valign="top" align="left">Selected: 3 (via 5 fold cross validation)</td>
</tr>
<tr>
<td valign="top" align="left">Random Forest (RF)</td>
<td valign="top" align="left">Number of trees (ntree)</td>
<td valign="top" align="left">Default: 500</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Number of variables (mtry)</td>
<td valign="top" align="left">Selected: 1 (via 5 fold cross validation)</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Node size</td>
<td valign="top" align="left">Default: 5</td>
</tr>
<tr>
<td valign="top" align="left">Gaussian Process Regression (GPR)</td>
<td valign="top" align="left">Kernel type</td>
<td valign="top" align="left">Radial Basis Function (RBF) kernel (gaussprRadial method in caret)</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Hyperparameters</td>
<td valign="top" align="left">Optimized via default settings in the caret package</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
</sec>
<sec id="s3" sec-type="results">
<label>3</label>
<title>Results</title>
<sec id="s3_1">
<label>3.1</label>
<title>Estimation of scan angle for the soybean PH study</title>
<p>The point closer to the nadir tends to provide a consistent elevation reading as the LiDAR pulses hit the ground more directly (closer to perpendicular), reducing the distortions. In our study, the most consistent and precise capture of the ground elevation appears to occur within the scan angle range of -15 to +15 for DTM generation, as shown in <xref ref-type="fig" rid="f4">
<bold>Figure&#xa0;4</bold>
</xref>. The distribution of the LiDAR pulses within this range is more tightly clustered, indicating the consistency in elevation information with reduced variability. When looking at the LiDAR pulse distribution across other angles farther from the nadir, more outlier values appeared in the dataset.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Distribution of LiDAR pulses across different scan angles in various elevations within a transect in the experimental field.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1488760-g004.tif"/>
</fig>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Elevation adjustment</title>
<p>DSM and DTM rasters were adjusted to locate the aerial images at their absolute position on the ground. Using the offset values generated around the GCPs and checkpoints, the rasters created using both RGB and LiDAR platforms were adjusted. The shift in the elevation values in RGB and LiDAR-generated DSM was noticed after correcting them using adjustment values. In the case of the DSM generated using an RGB camera, the uppermost elevation values shifted to 228.436 meters from 222.013 meters (<xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5A</bold>
</xref>). In the DSM generated from aerial LiDAR, the uppermost elevation value shifted to 269.786 meters in the adjusted DSM (<xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5B</bold>
</xref>).</p>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Digital surface model generated from <bold>(A)</bold> RGB and <bold>(B)</bold> LiDAR sensors on different dates. The upper plot layout shows the DSM before elevation adjustment, and the lower plots show the adjusted DSM.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1488760-g005.tif"/>
</fig>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Ground reference height</title>
<p>
<xref ref-type="table" rid="T3">
<bold>Table&#xa0;3</bold>
</xref> shows the results from the manual height measurement in the field. It shows the descriptive data of field-measured PH conducted across 88 plots at three different soybean growth and developmental dates. The average PHs recorded on July 7, July 18, and August 29 were 0.39 meters, 0.65 meters, and 0.88 meters, respectively.</p>
<table-wrap id="T3" position="float">
<label>Table&#xa0;3</label>
<caption>
<p>Descriptive statistics of ground reference height.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Date</th>
<th valign="top" align="left">Mean</th>
<th valign="top" align="left">Standard deviation</th>
<th valign="top" align="left">Minimum</th>
<th valign="top" align="left">Maximum</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="right">7-Jul</td>
<td valign="top" align="right">0.385</td>
<td valign="top" align="right">0.066</td>
<td valign="top" align="right">0.23</td>
<td valign="top" align="right">0.5</td>
</tr>
<tr>
<td valign="top" align="right">18-Jul</td>
<td valign="top" align="right">0.645</td>
<td valign="top" align="right">0.11</td>
<td valign="top" align="right">0.385</td>
<td valign="top" align="right">0.86</td>
</tr>
<tr>
<td valign="top" align="right">29-Aug</td>
<td valign="top" align="right">0.874</td>
<td valign="top" align="right">0.113</td>
<td valign="top" align="right">0.61</td>
<td valign="top" align="right">1.12</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_4">
<label>3.4</label>
<title>Estimation of crop height using CHM</title>
<p>The PH measured manually in the field using a ruler was similar to the values estimated using aerial sensors mounted in the drones. The comparison can be seen in <xref ref-type="table" rid="T4">
<bold>Table&#xa0;4</bold>
</xref>, where the distribution of the height values among different sensors and ground reference height is shown. The pattern of PH distribution, in general, is similar on all three dates. However, a considerable variation in the PH distribution can be noticed on the 7th of July and 18th of July in the case of RGB vs. Manual and the 29th of August in the case of LiDAR vs. Manual PH comparison. Furthermore, the complete distribution of ground reference PH and the PH collected using different aerial sensors across different dates can be visualized in <xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6</bold>
</xref>.</p>
<table-wrap id="T4" position="float">
<label>Table&#xa0;4</label>
<caption>
<p>Average PH using different methods across different aerial dates of soybean.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" rowspan="2" align="left"/>
<th valign="top" colspan="3" align="left">7-Jul</th>
<th valign="top" colspan="3" align="left">18-Jul</th>
<th valign="top" colspan="3" align="left">29-Aug</th>
</tr>
<tr>
<th valign="top" align="left">Manual</th>
<th valign="top" align="left">RGB</th>
<th valign="top" align="left">LiDAR</th>
<th valign="top" align="left">Manual</th>
<th valign="top" align="left">RGB</th>
<th valign="top" align="left">LiDAR</th>
<th valign="top" align="left">Manual</th>
<th valign="top" align="left">RGB</th>
<th valign="top" align="left">LiDAR</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">PH (m)</td>
<td valign="top" align="left">0.39</td>
<td valign="top" align="left">0.57</td>
<td valign="top" align="left">0.35</td>
<td valign="top" align="left">0.65</td>
<td valign="top" align="left">0.85</td>
<td valign="top" align="left">0.54</td>
<td valign="top" align="left">0.87</td>
<td valign="top" align="left">0.82</td>
<td valign="top" align="left">0.73</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>PH distribution among 88 plots across three dates showing ground reference height by the solid line and sensor-based height by dotted line. The 2D plots were generated using ArcGIS Pro for RGB and LiDAR images.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1488760-g006.tif"/>
</fig>
<p>A simple linear regression analysis assessed the relationship between PHs derived from Canopy Height Models (CHM) using RGB and LiDAR sensors and the heights measured manually in the field across different dates. The ground reference PH and RGB-derived PH showed a moderate correlation with the coefficient of determination (R<sup>2</sup>) equal to 0.52. The comparison between manually measured soybean height against the LiDAR-derived PH showed a strong correlation with the coefficient of determination (R<sup>2</sup>) of 0.82. Scatterplots illustrating these comparisons are displayed in <xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7</bold>
</xref>: <xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7A</bold>
</xref> for UAV-based RGB vs manually measured PH, and <xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7B</bold>
</xref> for UAV-based LiDAR vs manually measured PH across various stages.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>Linear relationship between PH estimated using a ruler in the field and the PH measured using <bold>(A)</bold> RGB camera and the <bold>(B)</bold> LiDAR sensor throughout the season.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1488760-g007.tif"/>
</fig>
<p>In the case of manually measured PH versus RGB-derived PH, the coefficient of determination (R<sup>2</sup>) value ranged between 0.52 and 0.49 in decreasing order across July 7, July 18, and August 29. The correlation remained relatively consistent on July 7 and July 18, with an R<sup>2</sup> of approximately 0.52. On August 29, the correlation decreased slightly with an R<sup>2</sup> of 0.49. The PH distribution pattern was similar to RGB vs. manual PH measurement when comparing ground-measured and LiDAR-generated PH. The coefficient of determination (R<sup>2</sup>) between 0.75 and 0.29 was recorded across various dates in descending order, as shown in <xref ref-type="fig" rid="f8">
<bold>Figure&#xa0;8</bold>
</xref>. On July 7, the highest R<sup>2</sup> value was recorded; however, by August 29, the correlation dropped significantly to an R<sup>2</sup> of 0.29, suggesting that the precision of LiDAR may decrease as the crop progresses toward physiological maturity. Looking specifically at the RMSE and MAE values, as shown in <xref ref-type="table" rid="T5">
<bold>Table&#xa0;5</bold>
</xref>, the comparison between manual measurements and LiDAR-based PH showed the lowest values on July 7. In contrast, the comparison between RGB-measured PH and ground reference PH on July 18 generated the highest RMSE and MAE values, indicating a lesser agreement between ground reference height and RGB-derived height at that particular stage of the crop. The lesser RMSE and MAE values indicate the substantial agreement between the ground reference PH and sensor-measured height. Despite the inharmonious correlation values between the PH generated using aerial sensors and ground reference PH, their relationship remains statistically significant (p &lt; 0.001) across all the growth and developmental periods.</p>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>Comparison of the manually measured PH and estimated PH using aerial (top) RGB and (bottom) LiDAR sensor.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1488760-g008.tif"/>
</fig>
<table-wrap id="T5" position="float">
<label>Table&#xa0;5</label>
<caption>
<p>Comparative result between RGB and LiDAR sensors against manual measurements.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" rowspan="2" align="left"/>
<th valign="top" colspan="3" align="left">RGB vs. Manual</th>
<th valign="top" colspan="3" align="left">LiDAR vs. Manual</th>
</tr>
<tr>
<th valign="top" align="left">7 July</th>
<th valign="top" align="left">18 July</th>
<th valign="top" align="left">29 August</th>
<th valign="top" align="left">7 July</th>
<th valign="top" align="left">18 July</th>
<th valign="top" align="left">29 August</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">R<sup>2</sup>
</td>
<td valign="top" align="left">0.52</td>
<td valign="top" align="left">0.51</td>
<td valign="top" align="left">0.49</td>
<td valign="top" align="left">0.75</td>
<td valign="top" align="left">0.74</td>
<td valign="top" align="left">0.29</td>
</tr>
<tr>
<td valign="top" align="left">RMSE (m)</td>
<td valign="top" align="left">0.2</td>
<td valign="top" align="left">0.24</td>
<td valign="top" align="left">0.11</td>
<td valign="top" align="left">0.05</td>
<td valign="top" align="left">0.12</td>
<td valign="top" align="left">0.18</td>
</tr>
<tr>
<td valign="top" align="left">MAE (m)</td>
<td valign="top" align="left">0.18</td>
<td valign="top" align="left">0.21</td>
<td valign="top" align="left">0.08</td>
<td valign="top" align="left">0.04</td>
<td valign="top" align="left">0.11</td>
<td valign="top" align="left">0.15</td>
</tr>
<tr>
<td valign="top" align="left">T</td>
<td valign="top" align="left">9.7</td>
<td valign="top" align="left">9.5</td>
<td valign="top" align="left">9.2</td>
<td valign="top" align="left">16.2</td>
<td valign="top" align="left">15.9</td>
<td valign="top" align="left">5.9</td>
</tr>
<tr>
<td valign="top" align="left">P</td>
<td valign="top" align="left">***</td>
<td valign="top" align="left">***</td>
<td valign="top" align="left">***</td>
<td valign="top" align="left">***</td>
<td valign="top" align="left">***</td>
<td valign="top" align="left">***</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>***, **, * indicate significance at 0.001, 0.01, and 0.05 levels respectively.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<p>Various regression models were employed to evaluate the accuracy and effectiveness of LiDAR and RGB sensors in predicting soybean PH. Meanwhile, outliers were identified using residual diagnostic as shown in <xref ref-type="fig" rid="f9">
<bold>Figure&#xa0;9</bold>
</xref>. They were included in the analysis unless they exceeded 3 standard deviations, as their effect on the model was minimal.</p>
<fig id="f9" position="float">
<label>Figure&#xa0;9</label>
<caption>
<p>Residual plots for different regression models with &#xb1;3 standard deviation thresholds.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1488760-g009.tif"/>
</fig>
<p>To evaluate the performance of our regression models in predicting PH, we calculated the R<sup>2</sup>, RMSE, and MAE on the test dataset summarized in the <xref ref-type="fig" rid="f10">
<bold>Figure&#xa0;10</bold>
</xref>.</p>
<fig id="f10" position="float">
<label>Figure&#xa0;10</label>
<caption>
<p>Performance metrics for different regression models (Multiple linear regression, partial least square regression, random forest, and Gaussian process regression) on the test data set.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1488760-g010.tif"/>
</fig>
<p>The GPR model exhibited the highest R<sup>2</sup> value of 0.85, indicating that it explains 85% of the variance in the PH data. Additionally, GPR has the lowest RMSE and MAE, suggesting superior predictive accuracy and consistency compared to the other models. The RF model also performed well, with an R<sup>2</sup> of 0.79, RMSE of 0.09, and MAE of 0.07. The MLR model achieved an R<sup>2</sup> of 0.77, RMSE of 0.1, and MAE of 0.078, indicating reasonable performance with higher prediction error compared to GPR and RF. The PLSR model had the lowest performance, with an R<sup>2</sup> of 0.73, RMSE of 0.11, and MAE of 0.08.</p>
<p>To assess the generalizability of the models, 5-fold cross-validation was performed, with the results summarized in <xref ref-type="table" rid="T6">
<bold>Table&#xa0;6</bold>
</xref>. The cross-validation metrics provide a more robust estimate of the model performance by averaging the result across multiple folds. The RF model maintained strong performance in cross-validation, with a mean R<sup>2</sup> of 0.85, RMSE of 0.09, and MAE of 0.06. The GPR model showed consistent results with a mean R<sup>2</sup> of 0.8, RMSE of 0.11, and MAE of 0.08, demonstrating its reliability and stability.</p>
<table-wrap id="T6" position="float">
<label>Table&#xa0;6</label>
<caption>
<p>Summarized result of 5-fold cross-validation metrics.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Model</th>
<th valign="top" align="left">R<sup>2</sup> Mean</th>
<th valign="top" align="left">R<sup>2</sup> SD</th>
<th valign="top" align="left">RMSE Mean</th>
<th valign="top" align="left">RMSE SD</th>
<th valign="top" align="left">MAE Mean</th>
<th valign="top" align="left">MAE SD</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">MLR</td>
<td valign="top" align="left">0.84</td>
<td valign="top" align="left">0.01</td>
<td valign="top" align="left">0.09</td>
<td valign="top" align="left">0.002</td>
<td valign="top" align="left">0.07</td>
<td valign="top" align="left">0.002</td>
</tr>
<tr>
<td valign="top" align="left">PLSR</td>
<td valign="top" align="left">0.78</td>
<td valign="top" align="left">0.021</td>
<td valign="top" align="left">0.1</td>
<td valign="top" align="left">0.004</td>
<td valign="top" align="left">0.08</td>
<td valign="top" align="left">0.004</td>
</tr>
<tr>
<td valign="top" align="left">RF</td>
<td valign="top" align="left">0.85</td>
<td valign="top" align="left">0.02</td>
<td valign="top" align="left">0.09</td>
<td valign="top" align="left">0.006</td>
<td valign="top" align="left">0.06</td>
<td valign="top" align="left">0.005</td>
</tr>
<tr>
<td valign="top" align="left">GPR</td>
<td valign="top" align="left">0.8</td>
<td valign="top" align="left">0.03</td>
<td valign="top" align="left">0.11</td>
<td valign="top" align="left">0.006</td>
<td valign="top" align="left">0.08</td>
<td valign="top" align="left">0.008</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The MLR model achieved a mean R<sup>2</sup> of 0.84, RMSE of 0.09, and MAE of 0.07, indicating good performance with low variability, as reflected by the slight standard deviation. The PLSR model had a mean R<sup>2</sup> of 0.78, RMSE of 0.1, and MAE of 0.08, showing slightly lower performance compared to the other models but with acceptable stability.</p>
</sec>
</sec>
<sec id="s4" sec-type="discussion">
<label>4</label>
<title>Discussion</title>
<p>This study focused on identifying the most effective sensor between the two most widely used sensors (RGB camera and LiDAR) for PH estimation using UAVs. Across the crop growth cycle, HTAP was conducted at three different times. Our result showed that PH estimation in soybean using UAV-based LiDAR (R<sup>2</sup> = 0.83) could be more reliable than the UAV-equipped RGB camera (R<sup>2</sup> = 0.53) in the pod growth and seed filling stages. However, the result showed the reliability of deploying RGB cameras, specifically in the physiological maturity stage when LiDAR cannot capture highly correlated results. Similarly, the study highlighted factors like scan angle and elevation adjustment critical in canopy height generation using aerial platforms.</p>
<sec id="s4_1">
<label>4.1</label>
<title>Estimation of crop height</title>
<p>In this study, we assessed crop height using RGB and LiDAR sensors across three soybean growth stages. To enhance the precision of crop height predictions from RGB imageries, we adjusted the DTM and DSM rasters by applying elevation adjustment. These adjustments used offset values calculated from buffers around GCPs and checkpoints. For LiDAR point clouds, we set appropriate scan angles and performed ground classification using the multiscale curvature classification (MCC) method, which effectively distinguishes between ground and non-ground points. The MCC algorithm, along with the Progressive morphological filter (PMF) and cloth simulation function (CSF), was evaluated to refine the DTM accuracy. PMF, as described by <xref ref-type="bibr" rid="B95">Zhang et&#xa0;al. (2003)</xref> classifies points as ground and non-ground points based on a dual-threshold approach. Similarly, CSF, as explained by <xref ref-type="bibr" rid="B97">Zhang et&#xa0;al. (2016)</xref> simulates a virtual cloth dropped over an inverted point cloud to identify ground points, and MCC uses curvature thresholds to interpolate the ground surface as explained by <xref ref-type="bibr" rid="B20">Evans and Hudak (2007)</xref>. The PMF algorithm might remove essential terrain details by classifying ground points as non-ground (<xref ref-type="bibr" rid="B95">Zhang et&#xa0;al., 2003</xref>). Similarly, the CSF algorithm struggles with the classification of low vegetation and MCC is well-suited for the classification of complex vegetated surfaces (<xref ref-type="bibr" rid="B63">Roberts et&#xa0;al., 2019</xref>). In our study, ground classification was done during the extraction of the DTM dataset at the early growth stage of soybeans, so MCC was preferred over CSF to obtain accurate terrain information.</p>
<p>For UAV-based data collection, RGB cameras offer a cost-effective solution despite their limitations in penetrating dense canopies (<xref ref-type="bibr" rid="B10">Cao et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B48">Luo et&#xa0;al., 2021</xref>). The images can also be processed in user-friendly processing software. The structure from motion (SfM) photogrammetry technique is used to gather canopy height and structure details from high-resolution images as this method can generate a point cloud from several images (<xref ref-type="bibr" rid="B39">Kalacska et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B12">Coops et&#xa0;al., 2021</xref>). UAV-based RGB camera estimated PH and proved as an important proxy for dry biomass in summer barley (<xref ref-type="bibr" rid="B4">Bendig et&#xa0;al., 2015</xref>). UAV-based imaging measurement system quantified PH with minimum error in cotton (<xref ref-type="bibr" rid="B22">Feng et&#xa0;al., 2019</xref>). PH and leaf area index (LAI) of different soybean varieties were estimated using a Kinect 2.0 sensor indoors (<xref ref-type="bibr" rid="B49">Ma et&#xa0;al., 2019</xref>). Conversely, LiDAR technology excels by penetrating crop canopies to measure PH accurately, unaffected by external lighting (<xref ref-type="bibr" rid="B94">Yuan et&#xa0;al., 2018</xref>). The advantageous features of LiDAR include its ability to penetrate the crop canopy, enabling it to reach the ground (<xref ref-type="bibr" rid="B15">Dalla Corte et&#xa0;al., 2022</xref>) and supply 3D structural information invaluable for HTAP (<xref ref-type="bibr" rid="B58">Parker et&#xa0;al., 2004</xref>; <xref ref-type="bibr" rid="B57">Omasa et&#xa0;al., 2006</xref>). Terrestrial laser scanning (TLS) produced promising PH and showed its potential for non-destructive biomass estimation in maize (<xref ref-type="bibr" rid="B76">Tilly et&#xa0;al., 2014b</xref>) and rice (<xref ref-type="bibr" rid="B75">Tilly et&#xa0;al., 2014a</xref>). 
Multiple sensors mounted on commercial wild blueberry harvesters proved very efficient at estimating PH and fruit yield (<xref ref-type="bibr" rid="B21">Farooque et&#xa0;al., 2013</xref>). A study on winter wheat using a field phenomics platform (FPP) of LiDAR and a time of flight (ToF) camera produced a strongly correlated PH with manual height (<xref ref-type="bibr" rid="B101">Zhou et&#xa0;al., 2015</xref>). UAV-LiDAR was effective in estimating PH in sugar beet and wheat, while it was difficult in potatoes due to the complex canopy structure and uneven terrain created by ridges and furrows (<xref ref-type="bibr" rid="B72">ten Harkel et&#xa0;al., 2019</xref>).</p>
<p>Our study found that the UAV-mounted LiDAR more accurately predicted soybean height which aligns with similar conclusions in other crops like wheat (<xref ref-type="bibr" rid="B50">Madec et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B36">Jimenez-Berni et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B94">Yuan et&#xa0;al., 2018</xref>), sorghum (<xref ref-type="bibr" rid="B51">Maimaitijiang et&#xa0;al., 2020</xref>), and maize (<xref ref-type="bibr" rid="B46">Liu et&#xa0;al., 2024</xref>). In our study, there was a strong correlation between the PH obtained from the LiDAR sensor and manual measurement on July 7 and July 18. The R<sup>2</sup> values obtained in all three growth stages were greater than those of the earlier study done by <xref ref-type="bibr" rid="B48">Luo et&#xa0;al. (2021)</xref> using UAV-LiDAR in soybean. This may be attributed to the overestimated DTM, which led to a smaller correlation value in the earlier studies. To avoid DTM overestimation, our study used aerial data when the soybean field was completely visible, and soybeans were at a very early growth stage. We observed a decline in R<sup>2</sup> value in the R7 stage which aligns with similar studies in maize by <xref ref-type="bibr" rid="B102">Zhu et&#xa0;al. (2020)</xref> where a significant decrease in plant length, PH, canopy height, and plant width was observed as the plant progressed toward maturity and leaves fell off. At the R7 stage, soybeans undergo senescence, leading to leaf drop and changes in canopy structure. These changes might affect the LiDAR&#x2019;s ability to capture accurate plant height due to reduced canopy density and increased exposure to underlying structure that might have reduced the plot aggregated mean of all the pixels. Photogrammetry PH showed a moderate correlation in all the R3, R5, and R7 stages as demonstrated by moderate R<sup>2</sup> values. 
However, increasing RMSE value indicated increasing deviation of RGB-derived PH from manual measurement. Similar results were found in the earlier studies where the CHM created using the SfM technique exhibited some inaccuracies in height measurement, specifically noticeable in shorter plants (<xref ref-type="bibr" rid="B14">Cunliffe et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B87">Wijesingha et&#xa0;al., 2019</xref>). Our finding of PH obtained from RGB showing moderate correlation aligns with the conclusion from previous research on corn (<xref ref-type="bibr" rid="B27">Grenzd&#xf6;rffer, 2014</xref>; <xref ref-type="bibr" rid="B2">Bareth et&#xa0;al., 2016</xref>), which indicated that the photogrammetry technique struggles to reconstruct the uppermost parts of the canopy accurately. The overestimation in the PH estimated from the RGB camera in comparison to the LiDAR sensor may be partly due to the disparity in the spatial resolution of the two sensor systems as well as differences in canopy penetration capacity (<xref ref-type="bibr" rid="B50">Madec et&#xa0;al., 2017</xref>).</p>
<p>Overall, the regression models validated the PH predictions, with Gaussian Process Regression (GPR) showing the best performance and MLR and PLSR the least. Incorporation of the model improved the soybean height prediction demonstrated by the increased R<sup>2</sup> from 0.83 (LiDAR) to 0.85 (GPR). A similar result was obtained in the study of PH using UAV-based oblique photography and LiDAR sensor in maize (<xref ref-type="bibr" rid="B46">Liu et&#xa0;al., 2024</xref>). This study underscores the effectiveness of integrating multiple sensing technologies and analytical models to optimize the accuracy of crop height assessments throughout different stages of plant growth.</p>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>Influence of scan angle in PH prediction using LiDAR sensor</title>
<p>The complex plant morphology makes it harder for laser penetration which makes it difficult to obtain accurate measurements (<xref ref-type="bibr" rid="B65">Saeys et&#xa0;al., 2009</xref>). Earlier studies in winter wheat and winter rye demonstrated that overestimation is low for smaller angles and higher for increasing angles (<xref ref-type="bibr" rid="B18">Ehlert and Heisig, 2013</xref>). They further concluded the necessity of evaluating the role of scan angle in overestimating the measurement error in individual crop species. <xref ref-type="bibr" rid="B90">Xu et&#xa0;al. (2023)</xref> identified and developed a correction model based on scan angle to improve grassland canopy height estimation and demonstrated considerable improvement in PH from their corrected model. <xref ref-type="bibr" rid="B29">Guo et&#xa0;al. (2019)</xref> showed how varying scan angles and positions significantly influence accuracy in wheat height measurement throughout its growth stages. Earlier studies using LiDAR technology in predicting PH have not explored the influence of scan angle. However, we observed the distribution of LiDAR pulses across different scan angles and made efforts to identify appropriate scan angles. Reducing the scan angle towards zero can significantly enhance the accuracy of elevation data, particularly in establishing the digital elevation model (<xref ref-type="bibr" rid="B47">Lohr, 1998</xref>). To restrict our scan angle ranges to zero we evaluated the distribution of LiDAR pulses across various scan angles and identified that the -15 to 15-degree range consistently captured our ground feature. Our choice of scan angle range was further validated when ground points obtained using the MCC algorithm were uniformly distributed to zero elevation value. Thus, our study identified the optimum scan angle range in soybean PH estimation. Further, studies need to be conducted to quantify the effect of different scan angle ranges on the PH.</p>
</sec>
<sec id="s4_3">
<label>4.3</label>
<title>Significance of elevation adjustment in PH estimation</title>
<p>In our study, we observed inconsistencies in field elevation, underscoring the necessity of elevation adjustment to enhance the accuracy of PH predictions. PH estimation is particularly susceptible to biases stemming from errors in Digital Terrain Models (DTM) and Digital Surface Models (DSM), as well as the effects of wind (<xref ref-type="bibr" rid="B31">Han et&#xa0;al., 2018</xref>). Accurate elevation information is crucial for precision agriculture, as it allows for a detailed understanding of elevation gradients across the research field, which is essential for estimating precise elevation. Despite advancements in remote sensing technologies such as LiDAR and InSAR, which offer improved vegetation height assessments, adjustments for elevation are still required to refine these measurements (<xref ref-type="bibr" rid="B7">Breidenbach et&#xa0;al., 2008</xref>). The variability in ground profiles, influenced by external factors and the operation of agricultural machinery, further complicates accurate ground-level detection (<xref ref-type="bibr" rid="B71">Sun et&#xa0;al., 2022</xref>). <xref ref-type="bibr" rid="B52">Malambo et&#xa0;al. (2018)</xref> also highlight the significant impact that accurate ground surface detection has on PH estimation accuracy.</p>
<p>The role of elevation is particularly critical in ecological studies, such as predicting plant species distribution in mountainous areas, where the pattern of elevation is a key determinant (<xref ref-type="bibr" rid="B56">Oke and Thompson, 2015</xref>). Additionally, the creation of DTMs from aerial data, whether from LiDAR or photogrammetric methods, is susceptible to inherent inaccuracies due to sensor noise, atmospheric conditions, or the angle of data acquisition (<xref ref-type="bibr" rid="B33">Hug et&#xa0;al., 2004</xref>). <xref ref-type="bibr" rid="B74">Tilly et&#xa0;al. (2015)</xref> noted that elevation adjustments are essential when integrating multiple datasets collected at different times or using various technologies, to ensure a consistent reference point across datasets.</p>
<p>Adjusting for elevation not only enhances the precision of PH measurements but also improves the overall interpretation of remote sensing data, facilitating applications such as crop monitoring, yield prediction, and precision farming (<xref ref-type="bibr" rid="B3">Bendig et&#xa0;al., 2014</xref>; <xref ref-type="bibr" rid="B54">Mulla and Belmont, 2018</xref>). While many studies have acknowledged biases in PH prediction, few have addressed the influence of elevation adjustment as comprehensively as <xref ref-type="bibr" rid="B74">Tilly et&#xa0;al. (2015)</xref> and <xref ref-type="bibr" rid="B3">Bendig et&#xa0;al. (2014)</xref>. In this study, we employed this approach to derive the improved DTMs and DSMs, leading to improved PH estimations from both aerial RGB and LiDAR sensors. This methodological advancement contributes significantly to the field of precision agriculture by providing more reliable data for crop management and research.</p>
</sec>
<sec id="s4_4">
<label>4.4</label>
<title>Practical application of the study</title>
<p>RGB cameras and LiDAR are the two most popular sensors used for high-throughput plant height estimation techniques across agricultural operations on proximal or aerial platforms. Multiple studies across various crops have already demonstrated the usefulness of both the sensors for reliable plant height estimation. Those results have been valuable sources for farmers, agronomists, and breeders for high throughput plant phenotyping. In the case of soybeans, there are fewer studies regarding UAV-based plant height estimation techniques. The results of this study suggest LiDAR as the most effective sensor for soybean plant height estimation between pod development to seed filling. However, low-cost RGB cameras were found more effective in predicting plant height at a later stage (onset of physiological maturity). Thus, the experimental results from this study would be useful to agricultural researchers and farmers for the selection of the most effective sensor for plant height estimation during different growth stages in soybeans. Recording plant height at an appropriate time using the most effective sensor will help farmers make an informed decision regarding crop management, as plant height is one of the most important proxies for estimating soybean yield and biomass.</p>
</sec>
</sec>
<sec id="s5" sec-type="conclusions">
<label>5</label>
<title>Conclusion</title>
<p>This study explored low-cost RGB and LiDAR sensors (the most popular for PH studies) to evaluate which sensors produced more effective results for the PH estimation in soybean. An appropriate scan angle range was identified during the data processing, and ground classification was done using the MCC algorithm to compute precise DTM values. The CHM-based PH obtained from RGB and LiDAR sensors was compared with ground reference PH collected manually. Low-cost RGB cameras showed a moderate and consistent correlation across all three growth stages. In contrast, LiDAR demonstrated superior accuracy for soybean height estimation. However, aerial data collection timing and scan angle could significantly influence the result. Furthermore, low-cost RGB cameras could still be a more reliable option than LiDAR sensors for estimating soybean height at a later stage. This study verified the potential of low-cost RGB cameras and LiDAR in assessing soybean PH at different growth stages. The results from this study would help select appropriate aerial phenotyping sensors for estimating PH during different soybean growth stages.</p>
</sec>
</body>
<back>
<sec id="s6" sec-type="data-availability">
<title>Data availability statement</title>
<p>The original contributions presented in the study are included in the article/supplementary material. Further inquiries can be directed to the corresponding author.</p>
</sec>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>LP: Investigation, Project administration, Data curation, Formal Analysis, Methodology, Writing &#x2013; original draft. JS: Investigation, Funding acquisition, Supervision, Writing &#x2013; review &amp; editing. DK: Investigation, Writing &#x2013; review &amp; editing. SP: Investigation, Writing &#x2013; review &amp; editing. SK: Investigation, Writing &#x2013; review &amp; editing. BG: Writing &#x2013; review &amp; editing, Funding acquisition, Supervision. MG: Funding acquisition, Writing &#x2013; review &amp; editing, Supervision. AC: Conceptualization, Funding acquisition, Writing &#x2013; review &amp; editing, Investigation, Project administration, Resources, Supervision, Validation.</p>
</sec>
<sec id="s8" sec-type="funding-information">
<title>Funding</title>
<p>The author(s) declare financial support was received for the research, authorship, and/or publication of this article. This study was funded by USDA-NIFA 1890 Capacity Building Grant (Award Number 2023-38821-39960) to the Kentucky State University.</p>
</sec>
<ack>
<title>Acknowledgments</title>
<p>The authors are grateful to the farm staffs at Harold R. Benson Research and Demonstration Farm, Kentucky State University, for their help with experimental field management and related farm operations throughout the crop growing season.</p>
</ack>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted without any commercial or financial relationships that could potentially create a conflict of interest.</p>
</sec>
<sec id="s10" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>And&#xfa;jar</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Escol&#xe0;</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Rosell-Polo</surname> <given-names>J. R.</given-names>
</name>
<name>
<surname>Fern&#xe1;ndez-Quintanilla</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Dorado</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Potential of a terrestrial LiDAR-based system to characterise weed vegetation in maize crops</article-title>. <source>Comput. Electron Agric.</source> <volume>92</volume>, <fpage>11</fpage>&#x2013;<lpage>15</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2012.12.012</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bareth</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Bendig</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Tilly</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Hoffmeister</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Aasen</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Bolten</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>A comparison of UAV-and TLS-derived plant height for crop monitoring: using polygon grids for the analysis of crop surface models (CSMs)</article-title>. <source>Photogramm. Fernerkund. Geoinf</source> <volume>2016</volume>, <fpage>85</fpage>&#x2013;<lpage>94</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1127/pfg/2016/0289</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bendig</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Bolten</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Bennertz</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Broscheit</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Eichfuss</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Bareth</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Estimating biomass of barley using crop surface models (CSMs) derived from UAV-based RGB imaging</article-title>. <source>Remote Sens (Basel)</source> <volume>6</volume>, <fpage>10395</fpage>&#x2013;<lpage>10412</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs61110395</pub-id>
</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bendig</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Aasen</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Bolten</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Bennertz</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Broscheit</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2015</year>). <article-title>Combining UAV-based plant height from crop surface models, visible, and near infrared vegetation indices for biomass monitoring in barley</article-title>. <source>Int. J. Appl. Earth Observation Geoinformation</source> <volume>39</volume>, <fpage>79</fpage>&#x2013;<lpage>87</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2015.02.012</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Blanquart</surname> <given-names>J.-E.</given-names>
</name>
<name>
<surname>Sirignano</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Lenaerts</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Saeys</surname> <given-names>W.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Online crop height and density estimation in grain fields using LiDAR</article-title>. <source>Biosyst. Eng.</source> <volume>198</volume>, <fpage>1</fpage>&#x2013;<lpage>14</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2020.06.014</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Borra-Serrano</surname> <given-names>I.</given-names>
</name>
<name>
<surname>De Swaef</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Quataert</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Aper</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Saleem</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Saeys</surname> <given-names>W.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Closing the phenotyping gap: high resolution UAV time series for soybean growth analysis provides objective data from field trials</article-title>. <source>Remote Sens (Basel)</source> <volume>12</volume>, <elocation-id>1644</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs12101644</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Breidenbach</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Koch</surname> <given-names>B.</given-names>
</name>
<name>
<surname>K&#xe4;ndler</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Kleusberg</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2008</year>). <article-title>Quantifying the influence of slope, aspect, crown shape and stem density on the estimation of tree height at plot level using lidar and InSAR data</article-title>. <source>Int. J. Remote Sens</source> <volume>29</volume>, <fpage>1511</fpage>&#x2013;<lpage>1536</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/01431160701736364</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Brocks</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Bareth</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Estimating barley biomass with crop surface models from oblique RGB imagery</article-title>. <source>Remote Sens (Basel)</source> <volume>10</volume>, <elocation-id>268</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs10020268</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Calders</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Adams</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Armston</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Bartholomeus</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Bauwens</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Bentley</surname> <given-names>L. P.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Terrestrial laser scanning in forest ecology: Expanding the horizon</article-title>. <source>Remote Sens Environ.</source> <volume>251</volume>, <elocation-id>112102</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2020.112102</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cao</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Han</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Su</surname> <given-names>B.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>A comparison of UAV RGB and multispectral imaging in phenotyping for stay green of wheat population</article-title>. <source>Remote Sens (Basel)</source> <volume>13</volume>, <elocation-id>5173</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs13245173</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Colomina</surname> <given-names>I.</given-names>
</name>
<name>
<surname>Molina</surname> <given-names>P.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Unmanned aerial systems for photogrammetry and remote sensing: A review</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>92</volume>, <fpage>79</fpage>&#x2013;<lpage>97</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2014.02.013</pub-id>
</citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Coops</surname> <given-names>N. C.</given-names>
</name>
<name>
<surname>Tompalski</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Goodbody</surname> <given-names>T. R. H.</given-names>
</name>
<name>
<surname>Queinnec</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Luther</surname> <given-names>J. E.</given-names>
</name>
<name>
<surname>Bolton</surname> <given-names>D. K.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Modelling lidar-derived estimates of forest attributes over space and time: A review of approaches and future trends</article-title>. <source>Remote Sens Environ.</source> <volume>260</volume>, <elocation-id>112477</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2021.112477</pub-id>
</citation>
</ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cornelissen</surname> <given-names>J. H. C.</given-names>
</name>
<name>
<surname>Lavorel</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Garnier</surname> <given-names>E.</given-names>
</name>
<name>
<surname>D&#xed;az</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Buchmann</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Gurvich</surname> <given-names>D. E.</given-names>
</name>
<etal/>
</person-group>. (<year>2003</year>). <article-title>A handbook of protocols for standardised and easy measurement of plant functional traits worldwide</article-title>. <source>Aust. J. Bot.</source> <volume>51</volume>, <fpage>335</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1071/BT02124</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cunliffe</surname> <given-names>A. M.</given-names>
</name>
<name>
<surname>Brazier</surname> <given-names>R. E.</given-names>
</name>
<name>
<surname>Anderson</surname> <given-names>K.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Ultra-fine grain landscape-scale quantification of dryland vegetation structure with drone-acquired structure-from-motion photogrammetry</article-title>. <source>Remote Sens Environ.</source> <volume>183</volume>, <fpage>129</fpage>&#x2013;<lpage>143</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2016.05.019</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Dalla Corte</surname> <given-names>A. P.</given-names>
</name>
<name>
<surname>de Vasconcellos</surname> <given-names>B. N.</given-names>
</name>
<name>
<surname>Rex</surname> <given-names>F. E.</given-names>
</name>
<name>
<surname>Sanquetta</surname> <given-names>C. R.</given-names>
</name>
<name>
<surname>Mohan</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Silva</surname> <given-names>C. A.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Applying high-resolution UAV-liDAR and quantitative structure modelling for estimating tree attributes in a crop-livestock-forest system</article-title>. <source>Land (Basel)</source> <volume>11</volume>, <elocation-id>507</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/land11040507</pub-id>
</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Deery</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Jimenez-Berni</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Jones</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Sirault</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Furbank</surname> <given-names>R.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Proximal remote sensing buggies and potential applications for field-based phenotyping</article-title>. <source>Agronomy</source> <volume>4</volume>, <fpage>349</fpage>&#x2013;<lpage>379</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy4030349</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Dhami</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Dhakal</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Friel</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). &#x201c;<article-title>Crop height and plot estimation for phenotyping from unmanned aerial vehicles using 3D liDAR</article-title>,&#x201d; in <source>2020 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</source> (<publisher-loc>Las Vegas, NV, USA</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>2643</fpage>&#x2013;<lpage>2649</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/IROS45743.2020.9341343</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ehlert</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Heisig</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Sources of angle-dependent errors in terrestrial laser scanner-based crop stand measurement</article-title>. <source>Comput. Electron Agric.</source> <volume>93</volume>, <fpage>10</fpage>&#x2013;<lpage>16</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2013.01.002</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Erten</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Lopez-Sanchez</surname> <given-names>J. M.</given-names>
</name>
<name>
<surname>Yuzugullu</surname> <given-names>O.</given-names>
</name>
<name>
<surname>Hajnsek</surname> <given-names>I.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Retrieval of agricultural crop height from space: A comparison of SAR techniques</article-title>. <source>Remote Sens Environ.</source> <volume>187</volume>, <fpage>130</fpage>&#x2013;<lpage>144</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2016.10.007</pub-id>
</citation>
</ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Evans</surname> <given-names>J. S.</given-names>
</name>
<name>
<surname>Hudak</surname> <given-names>A. T.</given-names>
</name>
</person-group> (<year>2007</year>). <article-title>A multiscale curvature algorithm for classifying discrete return LiDAR in forested environments</article-title>. <source>IEEE Trans. Geosci. Remote Sens.</source> <volume>45</volume>, <fpage>1029</fpage>&#x2013;<lpage>1038</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TGRS.2006.890412</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Farooque</surname> <given-names>A. A.</given-names>
</name>
<name>
<surname>Chang</surname> <given-names>Y. K.</given-names>
</name>
<name>
<surname>Zaman</surname> <given-names>Q. U.</given-names>
</name>
<name>
<surname>Groulx</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Schumann</surname> <given-names>A. W.</given-names>
</name>
<name>
<surname>Esau</surname> <given-names>T. J.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Performance evaluation of multiple ground based sensors mounted on a commercial wild blueberry harvester to sense plant height, fruit yield and topographic features in real-time</article-title>. <source>Comput. Electron Agric.</source> <volume>91</volume>, <fpage>135</fpage>&#x2013;<lpage>144</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2012.12.006</pub-id>
</citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Feng</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Sudduth</surname> <given-names>K. A.</given-names>
</name>
<name>
<surname>Vories</surname> <given-names>E. D.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Cotton yield estimation from UAV-based plant height</article-title>. <source>Trans. ASABE</source> <volume>62</volume>, <fpage>393</fpage>&#x2013;<lpage>404</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.13031/trans.13067</pub-id>
</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gao</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Niu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Hou</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Estimating the Leaf Area Index, height and biomass of maize using HJ-1 and RADARSAT-2</article-title>. <source>Int. J. Appl. Earth Observation Geoinformation</source> <volume>24</volume>, <fpage>1</fpage>&#x2013;<lpage>8</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2013.02.002</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gao</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Wei</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Individual maize location and height estimation in field from UAV-borne LiDAR and RGB images</article-title>. <source>Remote Sens (Basel)</source> <volume>14</volume>, <elocation-id>2292</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs14102292</pub-id>
</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gaw&#x119;da</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Nowak</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Haliniarz</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Wo&#x17a;niak</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Yield and economic effectiveness of soybean grown under different cropping systems</article-title>. <source>Int. J. Plant Prod</source> <volume>14</volume>, <fpage>475</fpage>&#x2013;<lpage>485</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s42106-020-00098-1</pub-id>
</citation>
</ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gon&#xe7;alves</surname> <given-names>J. A.</given-names>
</name>
<name>
<surname>Henriques</surname> <given-names>R.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>UAV photogrammetry for topographic monitoring of coastal areas</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>104</volume>, <fpage>101</fpage>&#x2013;<lpage>111</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2015.02.009</pub-id>
</citation>
</ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Grenzd&#xf6;rffer</surname> <given-names>G. J.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Crop height determination with UAS point clouds</article-title>. <source>Int. Arch. Photogrammetry Remote Sens. Spatial Inf. Sci.</source> <volume>XL&#x2013;1</volume>, <fpage>135</fpage>&#x2013;<lpage>140</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/isprsarchives-XL-1-135-2014</pub-id>
</citation>
</ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Guan</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Ma</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Three-dimensional reconstruction of soybean canopies using multisource imaging for phenotyping analysis</article-title>. <source>Remote Sens (Basel)</source> <volume>10</volume>, <elocation-id>1206</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs10081206</pub-id>
</citation>
</ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Guo</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Fang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Cheng</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Tian</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Q.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Detection of wheat height using optimized multi-scan mode of LiDAR during the entire growth stages</article-title>. <source>Comput. Electron Agric.</source> <volume>165</volume>, <elocation-id>104959</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2019.104959</pub-id>
</citation>
</ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>H&#xe4;mmerle</surname> <given-names>M.</given-names>
</name>
<name>
<surname>H&#xf6;fle</surname> <given-names>B.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Direct derivation of maize plant and crop height from low-cost time-of-flight camera measurements</article-title>. <source>Plant Methods</source> <volume>12</volume>, <elocation-id>50</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13007-016-0150-6</pub-id>
</citation>
</ref>
<ref id="B31">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Han</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Clustering field-based maize phenotyping of plant-height growth and canopy spectral dynamics using a UAV remote-sensing approach</article-title>. <source>Front. Plant Sci.</source> <volume>9</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2018.01638</pub-id>
</citation>
</ref>
<ref id="B32">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hu</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Chapman</surname> <given-names>S. C.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Potgieter</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Duan</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Jordan</surname> <given-names>D.</given-names>
</name>
<etal/>
</person-group>. (<year>2018</year>). <article-title>Estimation of plant height using a high throughput phenotyping platform based on unmanned aerial vehicle and self-calibration: Example for sorghum breeding</article-title>. <source>Eur. J. Agron.</source> <volume>95</volume>, <fpage>24</fpage>&#x2013;<lpage>32</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.eja.2018.02.004</pub-id>
</citation>
</ref>
<ref id="B33">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hug</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Ullrich</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Grimm</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2004</year>). <article-title>Litemapper-5600-a waveform-digitizing LiDAR terrain and vegetation mapping system</article-title>. <source>Int. Arch. Photogrammetry Remote Sens. Spatial Inf. Sci.</source> <volume>36</volume>, <fpage>W2</fpage>.</citation>
</ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Imsande</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>1992</year>). <article-title>Agronomic characteristics that identify high yield, high protein soybean genotypes</article-title>. <source>Agron. J.</source> <volume>84</volume>, <fpage>409</fpage>&#x2013;<lpage>414</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2134/agronj1992.00021962008400030012x</pub-id>
</citation>
</ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jiang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Paterson</surname> <given-names>A. H.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>High throughput phenotyping of cotton plant height using depth images under field conditions</article-title>. <source>Comput. Electron Agric.</source> <volume>130</volume>, <fpage>57</fpage>&#x2013;<lpage>68</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2016.09.017</pub-id>
</citation>
</ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jimenez-Berni</surname> <given-names>J. A.</given-names>
</name>
<name>
<surname>Deery</surname> <given-names>D. M.</given-names>
</name>
<name>
<surname>Rozas-Larraondo</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Condon</surname> <given-names>A. G.</given-names>
</name>
<name>
<surname>Rebetzke</surname> <given-names>G. J.</given-names>
</name>
<name>
<surname>James</surname> <given-names>R. A.</given-names>
</name>
<etal/>
</person-group>. (<year>2018</year>). <article-title>High throughput determination of plant height, ground cover, and above-ground biomass in wheat with LiDAR</article-title>. <source>Front. Plant Sci.</source> <volume>9</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2018.00237</pub-id>
</citation>
</ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jin</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Mi</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Shen</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>X.</given-names>
</name>
<etal/>
</person-group>. (<year>2010</year>). <article-title>Agronomic and physiological contributions to the yield improvement of soybean cultivars released from 1950 to 2006 in Northeast China</article-title>. <source>Field Crops Res.</source> <volume>115</volume>, <fpage>116</fpage>&#x2013;<lpage>123</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.fcr.2009.10.016</pub-id>
</citation>
</ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jing</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wei</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Song</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Research on estimating rice canopy height and LAI based on LiDAR data</article-title>. <source>Sensors</source> <volume>23</volume>, <elocation-id>8334</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s23198334</pub-id>
</citation>
</ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kalacska</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Chmura</surname> <given-names>G. L.</given-names>
</name>
<name>
<surname>Lucanus</surname> <given-names>O.</given-names>
</name>
<name>
<surname>B&#xe9;rub&#xe9;</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Arroyo-Mora</surname> <given-names>J. P.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Structure from motion will revolutionize analyses of tidal wetland landscapes</article-title>. <source>Remote Sens Environ.</source> <volume>199</volume>, <fpage>14</fpage>&#x2013;<lpage>24</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2017.06.023</pub-id>
</citation>
</ref>
<ref id="B40">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kantolic</surname> <given-names>A. G.</given-names>
</name>
<name>
<surname>Slafer</surname> <given-names>G. A.</given-names>
</name>
</person-group> (<year>2001</year>). <article-title>Photoperiod sensitivity after flowering and seed number determination in indeterminate soybean cultivars</article-title>. <source>Field Crops Res.</source> <volume>72</volume>, <fpage>109</fpage>&#x2013;<lpage>118</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/S0378-4290(01)00168-X</pub-id>
</citation>
</ref>
<ref id="B41">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Khan</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Chopin</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Cai</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Eichi</surname> <given-names>V.-R.</given-names>
</name>
<name>
<surname>Haefele</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Miklavcic</surname> <given-names>S. J.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Quantitative estimation of wheat phenotyping traits using ground and aerial imagery</article-title>. <source>Remote Sens (Basel)</source> <volume>10</volume>, <elocation-id>950</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs10060950</pub-id>
</citation>
</ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lefsky</surname> <given-names>M. A.</given-names>
</name>
<name>
<surname>Cohen</surname> <given-names>W. B.</given-names>
</name>
<name>
<surname>Parker</surname> <given-names>G. G.</given-names>
</name>
<name>
<surname>Harding</surname> <given-names>D. J.</given-names>
</name>
</person-group> (<year>2002</year>). <article-title>Lidar remote sensing for ecosystem studies: Lidar, an emerging remote sensing technology that directly measures the three-dimensional distribution of plant canopies, can accurately estimate vegetation structural attributes and should be of particular interest to forest, landscape, and global ecologists</article-title>. <source>Bioscience</source> <volume>52</volume>, <fpage>19</fpage>&#x2013;<lpage>30</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1641/0006-3568(2002)052[0019:LRSFES]2.0.CO;2</pub-id>
</citation>
</ref>
<ref id="B43">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Bull</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Coe</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Eamkulworapong</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Scarrow</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Salim</surname> <given-names>M.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). &#x201c;<article-title>High-throughput plant height estimation from RGB Images acquired with Aerial platforms: a 3D point cloud based approach</article-title>,&#x201d; in <source>2019 Digital Image Computing: Techniques and Applications (DICTA)</source> (<publisher-loc>Perth, WA, Australia</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>1</fpage>&#x2013;<lpage>8</lpage>.</citation>
</ref>
<ref id="B44">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Kim</surname> <given-names>M. Y.</given-names>
</name>
<name>
<surname>Van</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Lee</surname> <given-names>Y.-H.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>X.</given-names>
</name>
<etal/>
</person-group>. (<year>2011</year>). <article-title>QTL identification of yield-related traits and their association with flowering and maturity in soybean</article-title>. <source>J. Crop Sci. Biotechnol.</source> <volume>14</volume>, <fpage>65</fpage>&#x2013;<lpage>70</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s12892-010-0115-7</pub-id>
</citation>
</ref>
<ref id="B45">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Shen</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Cao</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Cao</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Estimating forest structural attributes using UAV-LiDAR data in Ginkgo plantations</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>146</volume>, <fpage>465</fpage>&#x2013;<lpage>482</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2018.11.001</pub-id>
</citation>
</ref>
<ref id="B46">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>K.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Maize height estimation using combined unmanned aerial vehicle oblique photography and LIDAR canopy dynamic characteristics</article-title>. <source>Comput. Electron Agric.</source> <volume>218</volume>, <elocation-id>108685</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.108685</pub-id>
</citation>
</ref>
<ref id="B47">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lohr</surname> <given-names>U.</given-names>
</name>
</person-group> (<year>1998</year>). <article-title>Digital elevation models by laser scanning</article-title>. <source>Photogrammetric Rec.</source> <volume>16</volume>, <fpage>105</fpage>&#x2013;<lpage>109</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/0031-868X.00117</pub-id>
</citation>
</ref>
<ref id="B48">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Luo</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Xi</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Nie</surname> <given-names>S.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Maize and soybean heights estimation from unmanned aerial vehicle (UAV) LiDAR data</article-title>. <source>Comput. Electron Agric.</source> <volume>182</volume>, <elocation-id>106005</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2021.106005</pub-id>
</citation>
</ref>
<ref id="B49">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ma</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Guan</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>High-throughput phenotyping analysis of potted soybean plants using colorized depth images based on A proximal platform</article-title>. <source>Remote Sens (Basel)</source> <volume>11</volume>, <elocation-id>1085</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs11091085</pub-id>
</citation>
</ref>
<ref id="B50">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Madec</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Baret</surname> <given-names>F.</given-names>
</name>
<name>
<surname>de Solan</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Thomas</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Dutartre</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Jezequel</surname> <given-names>S.</given-names>
</name>
<etal/>
</person-group>. (<year>2017</year>). <article-title>High-throughput phenotyping of plant height: comparing unmanned aerial vehicles and ground LiDAR estimates</article-title>. <source>Front. Plant Sci.</source> <volume>8</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2017.02002</pub-id>
</citation>
</ref>
<ref id="B51">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Maimaitijiang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Sagan</surname> <given-names>V.</given-names>
</name>
<name>
<surname>Erkbol</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Adrian</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Newcomb</surname> <given-names>M.</given-names>
</name>
<name>
<surname>LeBauer</surname> <given-names>D.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>UAV-based sorghum growth monitoring: A comparative analysis of Lidar and photogrammetry</article-title>. <source>ISPRS Ann. Photogrammetry Remote Sens. Spatial Inf. Sci.</source> <volume>V-3&#x2013;2020</volume>, <fpage>489</fpage>&#x2013;<lpage>496</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/isprs-annals-V-3-2020-489-2020</pub-id>
</citation>
</ref>
<ref id="B52">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Malambo</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Popescu</surname> <given-names>S. C.</given-names>
</name>
<name>
<surname>Murray</surname> <given-names>S. C.</given-names>
</name>
<name>
<surname>Putman</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Pugh</surname> <given-names>N. A.</given-names>
</name>
<name>
<surname>Horne</surname> <given-names>D. W.</given-names>
</name>
<etal/>
</person-group>. (<year>2018</year>). <article-title>Multitemporal field-based plant height estimation using 3D point clouds generated from small unmanned aerial systems high-resolution imagery</article-title>. <source>Int. J. Appl. Earth Observation Geoinformation</source> <volume>64</volume>, <fpage>31</fpage>&#x2013;<lpage>42</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2017.08.014</pub-id>
</citation>
</ref>
<ref id="B53">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Morrison</surname> <given-names>M. J.</given-names>
</name>
<name>
<surname>Gahagan</surname> <given-names>A. C.</given-names>
</name>
<name>
<surname>Lefebvre</surname> <given-names>M. B.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Measuring canopy height in soybean and wheat using a low-cost depth camera</article-title>. <source>Plant Phenome J.</source> <volume>4</volume>, <fpage>1</fpage>&#x2013;<lpage>6</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/ppj2.20019</pub-id>
</citation>
</ref>
<ref id="B54">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Mulla</surname> <given-names>D. J.</given-names>
</name>
<name>
<surname>Belmont</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2018</year>). <source>Identifying and Characterizing Ravines with GIS Terrain Attributes for Precision Conservation</source>. (<publisher-loc>Madison, WI, USA</publisher-loc>: <publisher-name>ASA, CSSA, SSSA</publisher-name>), <fpage>109</fpage>&#x2013;<lpage>129</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2134/agronmonogr59.c6</pub-id>
</citation>
</ref>
<ref id="B55">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ning</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Yuan</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Dong</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Xue</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2018</year>). <article-title>Identification of QTLs related to the vertical distribution and seed-set of pod number in soybean [Glycine max (L.) Merri.]</article-title>. <source>PLoS One</source> <volume>13</volume>, <elocation-id>e0195830</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0195830</pub-id>
</citation>
</ref>
<ref id="B56">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Oke</surname> <given-names>O. A.</given-names>
</name>
<name>
<surname>Thompson</surname> <given-names>K. A.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Distribution models for mountain plant species: The value of elevation</article-title>. <source>Ecol. Modell</source> <volume>301</volume>, <fpage>72</fpage>&#x2013;<lpage>77</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ecolmodel.2015.01.019</pub-id>
</citation>
</ref>
<ref id="B57">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Omasa</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Hosoi</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Konishi</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2006</year>). <article-title>3D lidar imaging for detecting and understanding plant responses and canopy structure</article-title>. <source>J. Exp. Bot.</source> <volume>58</volume>, <fpage>881</fpage>&#x2013;<lpage>898</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/jxb/erl142</pub-id>
</citation>
</ref>
<ref id="B58">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Parker</surname> <given-names>G. G.</given-names>
</name>
<name>
<surname>Harding</surname> <given-names>D. J.</given-names>
</name>
<name>
<surname>Berger</surname> <given-names>M. L.</given-names>
</name>
</person-group> (<year>2004</year>). <article-title>A portable LIDAR system for rapid determination of forest canopy structure</article-title>. <source>J. Appl. Ecol.</source> <volume>41</volume>, <fpage>755</fpage>&#x2013;<lpage>767</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/j.0021-8901.2004.00925.x</pub-id>
</citation>
</ref>
<ref id="B59">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Patel</surname> <given-names>A. K.</given-names>
</name>
<name>
<surname>Park</surname> <given-names>E.-S.</given-names>
</name>
<name>
<surname>Lee</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Priya</surname> <given-names>G. G. L.</given-names>
</name>
<name>
<surname>Kim</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Joshi</surname> <given-names>R.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Deep learning-based plant organ segmentation and phenotyping of sorghum plants using LiDAR point cloud</article-title>. <source>IEEE J. Sel Top. Appl. Earth Obs Remote Sens</source> <volume>16</volume>, <fpage>8492</fpage>&#x2013;<lpage>8507</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/JSTARS.2023.3312815</pub-id>
</citation>
</ref>
<ref id="B60">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Petrou</surname> <given-names>Z. I.</given-names>
</name>
<name>
<surname>Tarantino</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Adamo</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Blonda</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Petrou</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Estimation of vegetation height through satellite image texture analysis</article-title>. <source>Int. Arch. Photogrammetry Remote Sens. Spatial Inf. Sci.</source> <volume>XXXIX-B8</volume>, <fpage>321</fpage>&#x2013;<lpage>326</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/isprsarchives-xxxix-b8-321-2012</pub-id>
</citation>
</ref>
<ref id="B61">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Phan</surname> <given-names>A. T. T.</given-names>
</name>
<name>
<surname>Takahashi</surname> <given-names>K.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Estimation of rice plant height from a low-cost UAV-based lidar point clouds</article-title>. <source>Int. J. Geoinformatics</source> <volume>17</volume>, <fpage>89</fpage>&#x2013;<lpage>98</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.52939/ijg.v17i2.1765</pub-id>
</citation>
</ref>
<ref id="B62">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Qiu</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Bai</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Fan</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Field-based high-throughput phenotyping for maize plant using 3D LiDAR point cloud generated with a &#x201c;Phenomobile&#x201d;</article-title>. <source>Front. Plant Sci.</source> <volume>10</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2019.00554</pub-id>
</citation>
</ref>
<ref id="B63">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Roberts</surname> <given-names>K. C.</given-names>
</name>
<name>
<surname>Lindsay</surname> <given-names>J. B.</given-names>
</name>
<name>
<surname>Berg</surname> <given-names>A. A.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>An analysis of ground-point classifiers for terrestrial LiDAR</article-title>. <source>Remote Sens (Basel)</source> <volume>11</volume>, <fpage>1915</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs11161915</pub-id>
</citation>
</ref>
<ref id="B64">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Roth</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Barendregt</surname> <given-names>C.</given-names>
</name>
<name>
<surname>B&#xe9;trix</surname> <given-names>C.-A.</given-names>
</name>
<name>
<surname>Hund</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Walter</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>High-throughput field phenotyping of soybean: Spotting an ideotype</article-title>. <source>Remote Sens Environ.</source> <volume>269</volume>, <elocation-id>112797</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2021.112797</pub-id>
</citation>
</ref>
<ref id="B65">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Saeys</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Lenaerts</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Craessaerts</surname> <given-names>G.</given-names>
</name>
<name>
<surname>De Baerdemaeker</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2009</year>). <article-title>Estimation of the crop density of small grains using LiDAR sensors</article-title>. <source>Biosyst. Eng.</source> <volume>102</volume>, <fpage>22</fpage>&#x2013;<lpage>30</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2008.10.003</pub-id>
</citation>
</ref>
<ref id="B66">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sharma</surname> <given-names>L. K.</given-names>
</name>
<name>
<surname>Bu</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Franzen</surname> <given-names>D. W.</given-names>
</name>
<name>
<surname>Denton</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Use of corn height measured with an acoustic sensor improves yield estimation with ground based active optical sensors</article-title>. <source>Comput. Electron Agric.</source> <volume>124</volume>, <fpage>254</fpage>&#x2013;<lpage>262</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2016.04.016</pub-id>
</citation>
</ref>
<ref id="B67">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Smith</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Rheinwalt</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Bookhagen</surname> <given-names>B.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Determining the optimal grid resolution for topographic analysis on an airborne lidar dataset</article-title>. <source>Earth Surface Dynamics</source> <volume>7</volume>, <fpage>475</fpage>&#x2013;<lpage>489</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/esurf-7-475-2019</pub-id>
</citation>
</ref>
<ref id="B68">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Su</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Bian</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>W.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Phenotyping of corn plants using unmanned aerial vehicle (UAV) images</article-title>. <source>Remote Sens (Basel)</source> <volume>11</volume>, <elocation-id>2021</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs11172021</pub-id>
</citation>
</ref>
<ref id="B69">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Paterson</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>In-field high-throughput phenotyping of cotton plant height using LiDAR</article-title>. <source>Remote Sens (Basel)</source> <volume>9</volume>, <elocation-id>377</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs9040377</pub-id>
</citation>
</ref>
<ref id="B70">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Paterson</surname> <given-names>A. H.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Robertson</surname> <given-names>J. S.</given-names>
</name>
<etal/>
</person-group>. (<year>2018</year>). <article-title>In-field high throughput phenotyping and cotton plant growth analysis using LiDAR</article-title>. <source>Front. Plant Sci.</source> <volume>9</volume>, <elocation-id>16</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2018.00016</pub-id>
</citation>
</ref>
<ref id="B71">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Luo</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>P.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Estimation of crop height distribution for mature rice based on a moving surface and 3D point cloud elevation</article-title>. <source>Agronomy</source> <volume>12</volume>, <elocation-id>836</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy12040836</pub-id>
</citation>
</ref>
<ref id="B72">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>ten Harkel</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Bartholomeus</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Kooistra</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Biomass and crop height estimation of different crops using UAV-based Lidar</article-title>. <source>Remote Sens (Basel)</source> <volume>12</volume>, <elocation-id>17</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs12010017</pub-id>
</citation>
</ref>
<ref id="B73">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Thompson</surname> <given-names>A. L.</given-names>
</name>
<name>
<surname>Thorp</surname> <given-names>K. R.</given-names>
</name>
<name>
<surname>Conley</surname> <given-names>M. M.</given-names>
</name>
<name>
<surname>Elshikha</surname> <given-names>D. M.</given-names>
</name>
<name>
<surname>French</surname> <given-names>A. N.</given-names>
</name>
<name>
<surname>Andrade-Sanchez</surname> <given-names>P.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Comparing nadir and multi-angle view sensor technologies for measuring in-field plant height of upland cotton</article-title>. <source>Remote Sens (Basel)</source> <volume>11</volume>, <elocation-id>700</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs11060700</pub-id>
</citation>
</ref>
<ref id="B74">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tilly</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Aasen</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Bareth</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Fusion of plant height and vegetation indices for the estimation of barley biomass</article-title>. <source>Remote Sens (Basel)</source> <volume>7</volume>, <fpage>11449</fpage>&#x2013;<lpage>11480</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs70911449</pub-id>
</citation>
</ref>
<ref id="B75">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tilly</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Hoffmeister</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Cao</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Lenz-Wiedemann</surname> <given-names>V.</given-names>
</name>
<name>
<surname>Miao</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2014</year>a). <article-title>Multitemporal crop surface models: accurate plant height measurement and biomass estimation with terrestrial laser scanning in paddy rice</article-title>. <source>J. Appl. Remote Sens</source> <volume>8</volume>, <elocation-id>83671</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1117/1.JRS.8.083671</pub-id>
</citation>
</ref>
<ref id="B76">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tilly</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Hoffmeister</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Schiedung</surname> <given-names>H.</given-names>
</name>
<name>
<surname>H&#xfc;tt</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Brands</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Bareth</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2014</year>b). <article-title>Terrestrial laser scanning for plant height measurement and biomass estimation of maize</article-title>. <source>Int. Arch. Photogrammetry Remote Sens. Spatial Inf. Sci.</source> <volume>XL-7</volume>, <fpage>181</fpage>&#x2013;<lpage>187</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/isprsarchives-XL-7-181-2014</pub-id>
</citation>
</ref>
<ref id="B77">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tischner</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Allphin</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Chase</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Orf</surname> <given-names>J. H.</given-names>
</name>
<name>
<surname>Lark</surname> <given-names>K. G.</given-names>
</name>
</person-group> (<year>2003</year>). <article-title>Genetics of seed abortion and reproductive traits in soybean</article-title>. <source>Crop Sci.</source> <volume>43</volume>, <fpage>464</fpage>&#x2013;<lpage>473</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2135/cropsci2003.4640</pub-id>
</citation>
</ref>
<ref id="B78">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Toutin</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2004</year>). <article-title>Review article: Geometric processing of remote sensing images: models, algorithms and methods</article-title>. <source>Int. J. Remote Sens</source> <volume>25</volume>, <fpage>1893</fpage>&#x2013;<lpage>1924</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/0143116031000101611</pub-id>
</citation>
</ref>
<ref id="B79">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tunca</surname> <given-names>E.</given-names>
</name>
<name>
<surname>K&#xf6;ksal</surname> <given-names>E. S.</given-names>
</name>
<name>
<surname>Taner</surname> <given-names>S.&#xc7;.</given-names>
</name>
<name>
<surname>Akay</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Crop height estimation of sorghum from high resolution multispectral images using the structure from motion (SfM) algorithm</article-title>. <source>Int. J. Environ. Sci. Technol.</source> <volume>21</volume>, <fpage>1981</fpage>&#x2013;<lpage>1992</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s13762-023-05265-1</pub-id>
</citation>
</ref>
<ref id="B80">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Turner</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Lucieer</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Watson</surname> <given-names>C.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>An automated technique for generating georectified mosaics from ultra-high resolution unmanned aerial vehicle (UAV) imagery, based on structure from motion (SfM) point clouds</article-title>. <source>Remote Sens (Basel)</source> <volume>4</volume>, <fpage>1392</fpage>&#x2013;<lpage>1410</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs4051392</pub-id>
</citation>
</ref>
<ref id="B81">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Vaiknoras</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Hubbs</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2023</year>). <source>Characteristics and Trends of US Soybean Production Practices, Costs, and Returns Since 2002</source>. (<publisher-loc>Washington, DC, USA</publisher-loc>: <publisher-name>U.S. Department of Agriculture, Economic Research Service</publisher-name>).</citation>
</ref>
<ref id="B82">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Volpato</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Pinto</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Gonz&#xe1;lez-P&#xe9;rez</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Thompson</surname> <given-names>I. G.</given-names>
</name>
<name>
<surname>Bor&#xe9;m</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Reynolds</surname> <given-names>M.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>High throughput field phenotyping for plant height using UAV-based RGB imagery in wheat breeding lines: feasibility and validation</article-title>. <source>Front. Plant Sci.</source> <volume>12</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2021.591587</pub-id>
</citation>
</ref>
<ref id="B83">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Waliman</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Zakhor</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Deep learning method for height estimation of sorghum in the field using LiDAR</article-title>. <source>Electronic Imaging</source> <volume>32</volume>, <fpage>1</fpage>&#x2013;<lpage>7</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2352/ISSN.2470-1173.2020.14.COIMG-343</pub-id>
</citation>
</ref>
<ref id="B84">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Singh</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Marla</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Morris</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Poland</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Field-based high-throughput phenotyping of plant height in sorghum using different sensing technologies</article-title>. <source>Plant Methods</source> <volume>14</volume>, <fpage>53</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13007-018-0324-5</pub-id>
</citation>
</ref>
<ref id="B85">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>H.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>
<italic>GmFtsH25</italic> overexpression increases soybean seed yield by enhancing photosynthesis and photosynthates</article-title>. <source>J. Integr. Plant Biol.</source> <volume>65</volume>, <fpage>1026</fpage>&#x2013;<lpage>1040</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/jipb.13405</pub-id>
</citation>
</ref>
<ref id="B86">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Watanabe</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Arai</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Takanashi</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Kajiya-Kanegae</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Kobayashi</surname> <given-names>M.</given-names>
</name>
<etal/>
</person-group>. (<year>2017</year>). <article-title>High-throughput phenotyping of sorghum plant height using an unmanned aerial vehicle and its application to genomic prediction modeling</article-title>. <source>Front. Plant Sci.</source> <volume>8</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2017.00421</pub-id>
</citation>
</ref>
<ref id="B87">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wijesingha</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Moeckel</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Hensgen</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Wachendorf</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Evaluation of 3D point cloud-based models for the prediction of grassland biomass</article-title>. <source>Int. J. Appl. Earth Observation Geoinformation</source> <volume>78</volume>, <fpage>352</fpage>&#x2013;<lpage>359</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2018.10.006</pub-id>
</citation>
</ref>
<ref id="B88">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Wilcox</surname> <given-names>J. R.</given-names>
</name>
</person-group> (<year>2016</year>). <source>World Distribution and Trade of Soybean</source>. (<publisher-loc>Madison, WI, USA</publisher-loc>: <publisher-name>American Society of Agronomy</publisher-name>), <fpage>1</fpage>&#x2013;<lpage>14</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2134/agronmonogr16.3ed.c1</pub-id>
</citation>
</ref>
<ref id="B89">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xu</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Paterson</surname> <given-names>A. H.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Multispectral imaging and unmanned aerial systems for cotton plant phenotyping</article-title>. <source>PloS One</source> <volume>14</volume>, <elocation-id>e0205083</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0205083</pub-id>
</citation>
</ref>
<ref id="B90">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xu</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Zheng</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>X.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Correction of UAV LiDAR-derived grassland canopy height based on scan angle</article-title>. <source>Front. Plant Sci.</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2023.1108109</pub-id>
</citation>
</ref>
<ref id="B91">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ye</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Abbas</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Liang</surname> <given-names>C.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>UAV-based time-series phenotyping reveals the genetic basis of plant height in upland cotton</article-title>. <source>Plant J.</source> <volume>115</volume>, <fpage>937</fpage>&#x2013;<lpage>951</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/tpj.16272</pub-id>
</citation>
</ref>
<ref id="B92">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yin</surname> <given-names>X.</given-names>
</name>
<name>
<surname>McClure</surname> <given-names>M. A.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Relationship of corn yield, biomass, and leaf nitrogen with normalized difference vegetation index and plant height</article-title>. <source>Agron. J.</source> <volume>105</volume>, <fpage>1005</fpage>&#x2013;<lpage>1016</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2134/agronj2012.0206</pub-id>
</citation>
</ref>
<ref id="B93">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yin</surname> <given-names>X.</given-names>
</name>
<name>
<surname>McClure</surname> <given-names>M. A.</given-names>
</name>
<name>
<surname>Jaja</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Tyler</surname> <given-names>D. D.</given-names>
</name>
<name>
<surname>Hayes</surname> <given-names>R. M.</given-names>
</name>
</person-group> (<year>2011</year>). <article-title>In-season prediction of corn yield using plant height under major production systems</article-title>. <source>Agron. J.</source> <volume>103</volume>, <fpage>923</fpage>&#x2013;<lpage>929</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2134/agronj2010.0450</pub-id>
</citation>
</ref>
<ref id="B94">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yuan</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Bhatta</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Shi</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Baenziger</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Ge</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Wheat height estimation using LiDAR in comparison to ultrasonic sensor and UAS</article-title>. <source>Sensors</source> <volume>18</volume>, <elocation-id>3731</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s18113731</pub-id>
</citation>
</ref>
<ref id="B95">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>S.-C.</given-names>
</name>
<name>
<surname>Whitman</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Shyu</surname> <given-names>M.-L.</given-names>
</name>
<name>
<surname>Yan</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>C.</given-names>
</name>
</person-group> (<year>2003</year>). <article-title>A progressive morphological filter for removing nonground measurements from airborne LIDAR data</article-title>. <source>IEEE Trans. Geosci. Remote Sens.</source> <volume>41</volume>, <fpage>872</fpage>&#x2013;<lpage>882</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TGRS.2003.810682</pub-id>
</citation>
</ref>
<ref id="B96">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Marzougui</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Sankaran</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>High-resolution satellite imagery applications in crop phenotyping: An overview</article-title>. <source>Comput. Electron Agric.</source> <volume>175</volume>, <fpage>105584</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2020.105584</pub-id>
</citation>
</ref>
<ref id="B97">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Qi</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wan</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Xie</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
<etal/>
</person-group>. (<year>2016</year>). <article-title>An easy-to-use airborne LiDAR data filtering method based on cloth simulation</article-title>. <source>Remote Sens (Basel)</source> <volume>8</volume>, <elocation-id>501</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs8060501</pub-id>
</citation>
</ref>
<ref id="B98">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Lin</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2017</year>). <article-title>OsMPH1 regulates plant height and improves grain yield in rice</article-title>. <source>PloS One</source> <volume>12</volume>, <elocation-id>e0180825</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0180825</pub-id>
</citation>
</ref>
<ref id="B99">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhao</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Su</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Cao</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Q.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Analysis of UAV lidar information loss and its influence on the estimation accuracy of structural and functional traits in a meadow steppe</article-title>. <source>Ecol. Indic</source> <volume>135</volume>, <elocation-id>108515</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ecolind.2021.108515</pub-id>
</citation>
</ref>
<ref id="B100">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhou</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Gu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Cheng</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Shu</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>Q.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Analysis of plant height changes of lodged maize using UAV-LiDAR data</article-title>. <source>Agriculture</source> <volume>10</volume>, <elocation-id>146</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture10050146</pub-id>
</citation>
</ref>
<ref id="B101">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Zhou</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Sankaran</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Khot</surname> <given-names>L. R.</given-names>
</name>
<name>
<surname>Pumphrey</surname> <given-names>M. O.</given-names>
</name>
<name>
<surname>Carter</surname> <given-names>A. H.</given-names>
</name>
</person-group> (<year>2015</year>). &#x201c;<article-title>Crop height estimation in wheat using proximal sensing techniques</article-title>,&#x201d; in <source>2015 ASABE Annual International Meeting</source> (<publisher-loc>St. Joseph, MI, USA</publisher-loc>: <publisher-name>American Society of Agricultural and Biological Engineers</publisher-name>), <fpage>1</fpage>.</citation>
</ref>
<ref id="B102">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhu</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Yan</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Yan</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Shi</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Analysing the phenotype development of soybean plants using low-cost 3D reconstruction</article-title>. <source>Sci. Rep.</source> <volume>10</volume>, <fpage>7055</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-020-63720-2</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>