<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2024.1371252</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Plant Science</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Detection of maize stem diameter by using RGB-D cameras&#x2019; depth information under selected field condition</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>Zhou</surname>
<given-names>Jing</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="author-notes" rid="fn003">
<sup>&#x2020;</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2624330"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Cui</surname>
<given-names>Mingren</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="author-notes" rid="fn003">
<sup>&#x2020;</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2717177"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Wu</surname>
<given-names>Yushan</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Gao</surname>
<given-names>Yudi</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Tang</surname>
<given-names>Yijia</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Jiang</surname>
<given-names>Bowen</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Wu</surname>
<given-names>Min</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Zhang</surname>
<given-names>Jian</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/733486"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Hou</surname>
<given-names>Lixin</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>College of Information Technology, Jilin Agricultural University</institution>, <addr-line>Changchun</addr-line>, <country>China</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Faculty of Agronomy, Jilin Agricultural University</institution>, <addr-line>Changchun</addr-line>, <country>China</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>Department of Biology, University of British Columbia, Okanagan</institution>, <addr-line>Kelowna, BC</addr-line>, <country>Canada</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited by: Jennifer Clarke, University of Nebraska-Lincoln, United States</p>
</fn>
<fn fn-type="edited-by">
<p>Reviewed by: Vladimir Torres, University of Nebraska-Lincoln, United States</p>
<p>Michael Tross, University of Nebraska-Lincoln, United States</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Jian Zhang, <email xlink:href="mailto:jian.zhang@ubc.ca">jian.zhang@ubc.ca</email>; Lixin Hou, <email xlink:href="mailto:lixinh@jlau.edu.cn">lixinh@jlau.edu.cn</email>
</p>
</fn>
<fn fn-type="other" id="fn003">
<p>&#x2020;These authors share first authorship</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>22</day>
<month>04</month>
<year>2024</year>
</pub-date>
<pub-date pub-type="collection">
<year>2024</year>
</pub-date>
<volume>15</volume>
<elocation-id>1371252</elocation-id>
<history>
<date date-type="received">
<day>16</day>
<month>01</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>09</day>
<month>04</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2024 Zhou, Cui, Wu, Gao, Tang, Jiang, Wu, Zhang and Hou</copyright-statement>
<copyright-year>2024</copyright-year>
<copyright-holder>Zhou, Cui, Wu, Gao, Tang, Jiang, Wu, Zhang and Hou</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>Stem diameter is a critical phenotypic parameter for maize, integral to yield prediction and lodging resistance assessment. Traditionally, the quantification of this parameter through manual measurement has been the norm, notwithstanding its tedious and laborious nature. To address these challenges, this study introduces a non-invasive field-based system utilizing depth information from RGB-D cameras to measure maize stem diameter. This technology offers a practical solution for conducting rapid and non-destructive phenotyping. Firstly, RGB images, depth images, and 3D point clouds of maize stems were captured using an RGB-D camera, and precise alignment between the RGB and depth images was achieved. Subsequently, the contours of maize stems were delineated using 2D image processing techniques, followed by the extraction of the stem&#x2019;s skeletal structure employing a thinning-based skeletonization algorithm. Furthermore, within the areas of interest on the maize stems, horizontal lines were constructed using points on the skeletal structure, resulting in 2D pixel coordinates at the intersections of these horizontal lines with the maize stem contours. Subsequently, a back-projection transformation from 2D pixel coordinates to 3D world coordinates was achieved by combining the depth data with the camera&#x2019;s intrinsic parameters. The 3D world coordinates were then precisely mapped onto the 3D point cloud using rigid transformation techniques. Finally, the maize stem diameter was sensed and determined by calculating the Euclidean distance between pairs of 3D world coordinate points. The method demonstrated a Mean Absolute Percentage Error (<italic>MAPE</italic>) of 3.01%, a Mean Absolute Error (<italic>MAE</italic>) of 0.75&#xa0;mm, a Root Mean Square Error (<italic>RMSE</italic>) of 1.07&#xa0;mm, and a coefficient of determination (<italic>R</italic>&#xb2;) of 0.96, ensuring accurate measurement of maize stem diameter. 
This research not only provides a new method of precise and efficient crop phenotypic analysis but also offers theoretical knowledge for the advancement of precision agriculture.</p>
</abstract>
<kwd-group>
<kwd>crop phenotyping</kwd>
<kwd>RGB-D</kwd>
<kwd>depth information</kwd>
<kwd>field maize</kwd>
<kwd>stem diameter</kwd>
</kwd-group>
<counts>
<fig-count count="13"/>
<table-count count="2"/>
<equation-count count="10"/>
<ref-count count="44"/>
<page-count count="16"/>
<word-count count="7875"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-in-acceptance</meta-name>
<meta-value>Technical Advances in Plant Science</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>The global population has now surpassed 8 billion and is projected to reach more than 9 billion by the year 2050 (<xref ref-type="bibr" rid="B32">Rahimifard et&#xa0;al., 2013</xref>). This necessitates an increase in crop yield by 70% in order to meet the growing global food requirements (<xref ref-type="bibr" rid="B36">Wang, 2022</xref>). However, agricultural production is facing unprecedented challenges including global climate change, natural disasters, and intense human activities, making the acceleration of breeding research particularly crucial. In recent years, as the cost of gene sequencing has steadily decreased and its speed has increased, agronomic experts have collected a vast array of crop genotypic information. Nevertheless, over the past few decades, the development of crop phenotyping technologies has lagged behind (<xref ref-type="bibr" rid="B33">Shen et&#xa0;al., 2022</xref>). In particular, the capacity for precise measurement of small-sized phenotypes in open field environments is relatively limited, requiring a substantial amount of manual labor. This method is not only costly but also inefficient. Particularly under conditions of high temperatures, intense light, and long work periods, the subjectivity and potential for error in data measurement can increase significantly. Therefore, it is essential to research crop phenotyping monitoring technologies that offer a relatively higher degree of automation and measurement accuracy with lower costs.</p>
<p>Maize (<italic>Zea mays</italic> L.) is one of the most important cereal crops in the world, distinguished by its prodigious productivity, substantial nutritive value, and amenability to biotechnological interventions. Such characteristics render it a model crop for diverse applications, ranging from alimentation to scientific investigation and bioenergy production (<xref ref-type="bibr" rid="B6">Bothast and Schlicher, 2005</xref>; <xref ref-type="bibr" rid="B11">Duvick, 2005</xref>; <xref ref-type="bibr" rid="B28">Nuss and Tanumihardjo, 2010</xref>). In the array of phenotypic characteristics of maize, stem diameter assumes a pivotal role, serving not only as an indicator for forecasting yield and assessing lodging resistance but also as a predictive measure for the seasonal biomass accumulation in maize (<xref ref-type="bibr" rid="B19">Kelly et&#xa0;al., 2015</xref>; <xref ref-type="bibr" rid="B27">Mousavi et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B22">Liu et&#xa0;al., 2022</xref>). Employing non-invasive imaging techniques for the <italic>in situ</italic> measurement of maize stem diameter could substantially improve the efficiency of breeding research. <xref ref-type="bibr" rid="B5">Batz et&#xa0;al. (2016)</xref> utilized a dual-camera system composed of red-green-blue (RGB) and time-of-flight (TOF) cameras to capture images of indoor-grown sorghum plants. The actual stem diameter was deduced from these images by applying a pixel length conversion factor, yielding an <italic>R</italic>&#xb2; of 0.70. Notwithstanding, incongruities in the field of view (FOV) between RGB and TOF cameras can result in disparate positioning of the same object within each camera&#x2019;s perspective. This discrepancy poses challenges for the accurate alignment of RGB and TOF images, a problem that remains unresolved. 
<xref ref-type="bibr" rid="B40">Zhang and Grift (2012)</xref> utilized a sensor comprising a charge-coupled device (CCD) camera in conjunction with an oblique laser sheet to image Miscanthus stems. They accurately measured stem diameters using 2D image processing methods grounded in the principles of pinhole imaging, achieving an <italic>R</italic>&#xb2; of 0.926. Despite the widespread application of 2D image processing technology in crop phenotyping, it presents constraints when characterizing phenotypic parameters in 3D space. Therefore, the fusion of 2D image processing with depth-perception technologies is expected to enhance the accuracy and reliability in the acquisition of crop phenotypic parameters (<xref ref-type="bibr" rid="B9">Chene et&#xa0;al., 2012</xref>; <xref ref-type="bibr" rid="B38">Wang and Li, 2014</xref>; <xref ref-type="bibr" rid="B25">Malik et&#xa0;al., 2019</xref>). <xref ref-type="bibr" rid="B39">Xu et&#xa0;al. (2023)</xref> captured color, depth, and near-infrared (NIR) images of cucumber seedlings within a controlled greenhouse setting employing dual Azure Kinect depth cameras. Segmentation of the foliage and stem components was accomplished through the application of a Mask R-CNN framework on the NIR images. Leveraging the approximate rectangular characteristic of cucumber seedling stems and incorporating depth information, researchers have computed the stem diameter of these seedlings. The <italic>R</italic>&#xb2; exceeded 0.82. The experimental environment of this study is controllable, with the effects of ambient light, shadows, and wind being negligible, providing ideal conditions for crop phenotyping monitoring. 
Additionally, in controlled environment potted crop phenotyping systems, not only can environmental factors be precisely regulated, but efficient and accurate phenotypic analyses are often performed through the application of an electric turntable or a scanning device integrated with a stepper motor, further enhancing the precision of data collection (<xref ref-type="bibr" rid="B37">Wang and Chen, 2020</xref>; <xref ref-type="bibr" rid="B1">Arief et&#xa0;al., 2021</xref>).</p>
<p>Although the indoor experimental setting offers precise control over variables such as light, temperature, and background, thus creating nearly ideal conditions for the accurate measurement and analysis of crop phenotypes, the complexity and unpredictability of outdoor environments pose challenges to crop phenotyping analysis. In particular, the analysis of crop phenotypes under field environments is confounded by a multiplicity of variables, including fluctuations in lighting conditions, topographical variation, and variations in plant density. During the initial phases of crop growth, top-view RGB imaging is employed to analyze the phenotypic characteristics of crops in open field environments (<xref ref-type="bibr" rid="B21">Liu et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B44">Zhou et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B31">Qiu et&#xa0;al., 2021</xref>). <xref ref-type="bibr" rid="B20">Li et&#xa0;al. (2021)</xref> acquired top-view images of maize seedlings with an EOS5DIII digital camera and employed convolutional neural network (CNN) algorithms to separate the seedlings from their background. Morphological features of the maize seedlings were then extracted using edge detection, connective domain markers, and morphological operations. Furthermore, this research transformed the RGB data of the images into the hue saturation value (HSV) color model to facilitate the extraction of the colorimetric properties of the seedlings. Concurrent with the rapid growth of crops during their initial stages, side-view imaging technology is being progressively utilized for phenotypic analysis in open field environments (<xref ref-type="bibr" rid="B3">Baharav et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B34">Song et&#xa0;al., 2019</xref>). <xref ref-type="bibr" rid="B30">Qiao et&#xa0;al. 
(2022)</xref> acquired side-view images of red jujube tree trunks utilizing an Intel RealSense D435i camera and separated the trunks from the background using an improved neural network model and the Maximum Between-Class Variance (Otsu) algorithm. The pixel stem diameter of the red jujube tree trunks was measured using the Euclidean distance, and the actual stem diameter was calculated based on depth information and intrinsic camera parameters, with an average absolute error of 5.02&#xa0;mm. The study capitalized on the prominent linear features of red jujube trunks to extract skeletal information from crop images for stem diameter estimation, resulting in a high level of precision. However, the experiment necessitated considerable computational capacities, entailing elevated operational processing demands.</p>
<p>In previous studies, we primarily relied on the use of external reference objects, such as chessboards, in combination with RGB images from RGB-D cameras for measuring the diameter of maize stems. Although this method is simple and effective, it is complex to operate in the field and susceptible to external environmental influences (<xref ref-type="bibr" rid="B42">Zhou et&#xa0;al., 2023a</xref>, <xref ref-type="bibr" rid="B43">Zhou et&#xa0;al., 2023b</xref>). This study proposes a novel measurement method that aligns RGB and depth images and utilizes back-projection technology to convert 2D coordinates into 3D spatial coordinates. This makes it possible to precisely measure the diameter of maize stems without the need for external reference objects. Furthermore, we directly extracted the necessary key information from 2D images and mapped the 3D coordinates into the 3D point cloud, avoiding complex processing of large volumes of 3D point cloud data. While maintaining measurement accuracy, this approach reduces the computational burden. This method not only simplifies the field measurement process but also reduces the reliance on high-cost equipment and complex data processing, significantly lowering the economic cost of research. Furthermore, it offers an efficient and accurate pathway for digital agriculture and crop phenotypic analysis.</p>
</sec>
<sec id="s2" sec-type="materials|methods">
<label>2</label>
<title>Materials and methods</title>
<sec id="s2_1">
<label>2.1</label>
<title>System architecture</title>
<p>The acquisition of field maize stem diameter using depth information from an RGB-D camera can be divided into three parts: data collection, data processing, and data analysis. The architecture of the system is shown in <xref ref-type="fig" rid="f1">
<bold>Figure&#xa0;1</bold>
</xref>.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Scheme of system architecture for acquisition of field maize stem diameter using depth information.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g001.tif"/>
</fig>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>Camera calibration</title>
<p>Although the Intel RealSense D435i camera is precisely self-calibrated before shipment, its operation in a cornfield environment, which involves prolonged exposure to high temperatures and intense sunlight, may compromise its accuracy. Accordingly, it is imperative to undertake self-calibration of the camera. The Depth Quality Tool v2.54.1 was employed for on-chip calibration, focal length calibration, and tare calibration. On-chip calibration is primarily aimed at reducing noise in depth data, and focal length calibration is performed to correct distortions in depth maps that result from focal length imbalances. Tare correction is implemented to enhance the precision of depth measurements. After completing the on-chip calibration and focal length calibration, two key metrics can be observed: health-check and focal length imbalance. If the health-check value is below 0.25 and the focal length imbalance is within &#xb1;0.2%, it can be concluded that the camera calibration data are normal, and no update is required (<xref ref-type="bibr" rid="B13">Grunnet-Jepsen et&#xa0;al., 2021</xref>). In the course of the calibration process, a standard calibration target of A4 dimensions is employed. This target features a dashed square with side lengths of 0.1 meters. Illustrations of the standard calibration target and the camera calibration scene are depicted in <xref ref-type="fig" rid="f2">
<bold>Figure&#xa0;2</bold>
</xref>. The results of the on-chip calibration, focal length calibration, and tare calibration are presented in <xref ref-type="fig" rid="f3">
<bold>Figure&#xa0;3</bold>
</xref>.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Standard calibration target and scene <bold>(A)</bold> Standard calibration Target <bold>(B)</bold> Camera calibration scene.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g002.tif"/>
</fig>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>Calibration results <bold>(A)</bold> On-chip calibration result <bold>(B)</bold> Focal length calibration result <bold>(C)</bold> Comparison before and after tare calibration.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g003.tif"/>
</fig>
<p>
<xref ref-type="fig" rid="f3">
<bold>Figures&#xa0;3A</bold>
</xref>, <xref ref-type="fig" rid="f3">
<bold>B</bold>
</xref> demonstrate that the obtained health-check value and the focal length imbalance value are -0.16 and -0.031%, respectively. These values fall within the established normal range, thus obviating the necessity for updates to the on-chip calibration and focal length calibration data. In addition, <xref ref-type="fig" rid="f3">
<bold>Figure&#xa0;3C</bold>
</xref> illustrates the efficacy of tare calibration, whereby the measurement error was reduced from 1.65 millimeters pre-calibration to 0.29 millimeters post-calibration. Despite the depth error reduction being modest at 1.36 millimeters, the absolute discrepancy between the actual and measured diameters of maize stems&#x2014;critical in the context of measuring tasks&#x2014;is at the millimeter scale. Consequently, the implementation of this calibration process is essential.</p>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>Data collection</title>
<p>Field trials were carried out at the teaching and research base of Jilin Agricultural University in Changchun, Jilin Province, China. The experimental subjects were maize plants at the small bell stage, with the maize variety being Ji Dan 27. Inter-plant spacing was maintained at 0.4 meters, and inter-row spacing at 0.8 meters. This planting pattern is designed to enhance the convenience of experimental operations while minimizing physical interference between plants by optimizing spatial distribution. The experimental plot spanned an area of 160 meters by 100 meters, corresponding to a planting density of 50,000 plants per hectare. Image acquisition commenced on the 50th day after sowing. The collection activity was scheduled between 15:00 and 18:00 in July 2023, under clear weather with occasional cloudiness. Imagery was acquired from six randomly chosen rows of maize within the experimental plot. In the early stages of crop growth, overlapping of plant canopies was minimal, allowing for the assumption that canopy density exerted an insignificant influence on the data collection.</p>
<p>In the experimental setup, data acquisition was facilitated by an array of instruments, comprising an Intel RealSense D435i camera, a vehicle-mounted mobile platform, a battery with a capacity of 12 ampere-hours (AH), an electrical power inverter, and a laptop computer. The camera was mounted on the vertical frame of a self-designed vehicle platform using a tri-axial adjustment arm. To reduce the influence of adjacent plants and weeds on data collection, the camera was positioned at a 45-degree downward angle to capture images of the maize stems. The camera is operational within a proximal range of 0.3 meters to 3 meters, ensuring optimal function. To ensure full morphological documentation of maize stems, the apparatus is positioned at a distance of 0.6 meters from the base of the maize plants with an elevation of 0.5 meters above the ground level. The energy supply for field operations is provided by a 12AH battery, which, through an inverter, furnishes a consistent power source to a laptop computer. This configuration is designed to guarantee uninterrupted laptop functionality in diverse field conditions.</p>
<p>The laptop in question is configured with the Windows 10 operating system and is equipped with Python 3.10 programming environment and Intel RealSense Viewer v2.54.1. In the Python environment, the camera simultaneously acquired RGB and depth images at fixed poses and generated 3D point clouds of maize stems using the Intel RealSense Viewer. These point clouds were then loaded into the CloudCompare software for visualization. The resolution of 848&#xd7;480 was selected for acquiring both RGB and depth images, as this resolution has been demonstrated to yield the highest quality of depth information from the camera (<xref ref-type="bibr" rid="B14">Grunnet-Jepsen et&#xa0;al., 2018</xref>). A schematic representation of the data acquisition apparatus is depicted in <xref ref-type="fig" rid="f4">
<bold>Figure&#xa0;4</bold>
</xref>. Illustrative examples of the acquired RGB images, depth maps, and 3D point clouds are shown in <xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5</bold>
</xref>.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Field-based mobile measuring platform: <bold>(A)</bold> Schematic of the mobile measuring platform <bold>(B)</bold> Photograph of the actual mobile measuring platform.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g004.tif"/>
</fig>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Maize stem information acquisition: <bold>(A)</bold> RGB image <bold>(B)</bold> Depth map <bold>(C)</bold> 3D point cloud.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g005.tif"/>
</fig>
</sec>
<sec id="s2_4">
<label>2.4</label>
<title>Data processing</title>
<sec id="s2_4_1">
<label>2.4.1</label>
<title>Image alignment</title>
<p>Techniques for image alignment are principally bifurcated into two categories: the adjustment of RGB images for conformity with depth images, and conversely, the rectification of depth images to align with RGB counterparts. Given the broader field of view of the depth camera compared to the RGB camera on the Intel RealSense D435i, aligning RGB images to depth images can result in data loss or the occurrence of voids in the aligned RGB images. To obviate these impediments, this study employs a method that leaves the RGB images unaltered while aligning the depth images to them, thereby accomplishing the image alignment process. In the Python programming environment, the alignment of images was executed by employing the rs.align class within the pyrealsense2 library. This method produced alignment between depth and color frames, applicable for both RGB and depth image analysis. A comparative illustration of the images before and after alignment is presented in <xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6</bold>
</xref>.</p>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>Comparative images before and after alignment: <bold>(A)</bold> RGB image of maize stem <bold>(B)</bold> Depth image before alignment <bold>(C)</bold> Depth image after alignment.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g006.tif"/>
</fig>
</sec>
<sec id="s2_4_2">
<label>2.4.2</label>
<title>Image preprocessing</title>
<p>In this study, the combined HSV and Otsu algorithm was employed to discriminate the principal maize stems from complex field backgrounds. The OpenCV library&#x2019;s cv.cvtColor and cv.threshold functions were utilized for this task, with the OpenCV library operating on version 4.8.0. Furthermore, the morphological internal gradient algorithm was employed to obtain the contours of maize stems, facilitated by the functions cv.morphologyEx and cv.subtract. Given the established validation of the aforementioned algorithms in antecedent studies, detailed exposition is eschewed in this research (<xref ref-type="bibr" rid="B42">Zhou et&#xa0;al., 2023a</xref>, <xref ref-type="bibr" rid="B43">Zhou et&#xa0;al., 2023b</xref>).</p>
</sec>
<sec id="s2_4_3">
<label>2.4.3</label>
<title>Skeleton extraction algorithm</title>
<p>In the realm of image processing technology, the task of delineating and distilling salient features from intricate image compositions holds paramount significance. Skeletonization is employed as a strategy for the abstraction of morphological characteristics, and is widely acknowledged as an efficacious approach for feature delineation. Presently, methods that incorporate skeletonization algorithms to discern target features have found widespread application across various sectors, including industrial inspection, medical diagnostics, and crop phenotypic analysis (<xref ref-type="bibr" rid="B29">Patel et&#xa0;al., 2012</xref>; <xref ref-type="bibr" rid="B16">Jin and Saha, 2013</xref>; <xref ref-type="bibr" rid="B23">Liu et&#xa0;al., 2021</xref>). In the domain of crop phenotyping, extracting the skeletal structure of crop stems and utilizing this skeleton to assist in the measurement of stem diameter simplifies the complexity involved in such measurements. Furthermore, this approach enhances the automation of measuring stem diameter (<xref ref-type="bibr" rid="B30">Qiao et&#xa0;al., 2022</xref>).</p>
<p>Skeleton extraction algorithms can be primarily categorized into three predominant groups: those that utilize distance transformation, those employing thinning algorithms, and those founded on Voronoi diagrams (<xref ref-type="bibr" rid="B17">Jin and Kim, 2017</xref>). Skeleton extraction algorithms based on distance transformation can generate smoother and more continuous skeletons but may overlook certain details. Methods based on Voronoi diagrams may extract numerous false skeleton branches and are computationally intensive. Relative to alternative approaches, skeleton extraction algorithms that employ thinning techniques are proficient in generating refined skeletons for elongate structures (<xref ref-type="bibr" rid="B8">Chen et&#xa0;al., 2011</xref>). Thus, for the analysis of elongated maize stems, an algorithm based on thinning for skeletonization may be a superior choice.</p>
<p>The algorithm for skeletonization based on thinning operates on binary images where pixels labeled &#x2018;1&#x2019; denote the target pixels, and &#x2018;0&#x2019; designates the background pixels. In this binary context, a pixel manifesting a value of &#x2018;1&#x2019; is delineated as a boundary pixel of the object if it is adjacent to at least one &#x2018;0&#x2019; value pixel within its octal neighborhood. The iterative process begins at these boundary pixels, methodically stripping away pixels from the perimeter of the object that conform to predefined conditions. In the initial phase, the skeletonization algorithm designates a boundary pixel, denoted as <italic>P</italic>
<sub>0</sub>, to act as the central pixel. This pixel is encircled by eight neighboring pixels, labeled <italic>P</italic>
<sub>1</sub> to <italic>P</italic>
<sub>8</sub>, which are arranged clockwise to constitute a 3&#xd7;3 exploration grid. The numbering of this 8-connected neighborhood is shown in <xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7</bold>
</xref>. Following this setup, the algorithm evaluates whether <italic>P</italic>
<sub>0</sub> fulfills certain predefined criteria as detailed in <xref ref-type="disp-formula" rid="eq1">Equation 1</xref>. Upon satisfying these criteria, <italic>P</italic>
<sub>0</sub> is flagged for exclusion in the subsequent iteration of skeleton pruning.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>3&#xd7;3 exploration grid.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g007.tif"/>
</fig>
<disp-formula id="eq1">
<label>(1)</label>
<mml:math display="block" id="M1">
<mml:mrow>
<mml:mtable>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:mn mathvariant="bold">2</mml:mn>
<mml:mo>&#x2264;</mml:mo>
<mml:mtext mathvariant="bold-italic">N</mml:mtext>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">0</mml:mn>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mo>&#x2264;</mml:mo>
<mml:mn mathvariant="bold">6</mml:mn>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">S</mml:mtext>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">0</mml:mn>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">7</mml:mn>
</mml:msub>
<mml:mo>&#xd7;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:msub>
<mml:mo>&#xd7;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">3</mml:mn>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">0</mml:mn>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:msub>
<mml:mo>&#xd7;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">3</mml:mn>
</mml:msub>
<mml:mo>&#xd7;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">5</mml:mn>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">0</mml:mn>
</mml:mrow>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:math>
</disp-formula>
<p>Here, <italic>N</italic>(<italic>P</italic>
<sub>0</sub>) denotes the number of pixels with a value of 1 within the 8-neighborhood of <italic>P</italic>
<sub>0</sub>, and <italic>S</italic>(<italic>P</italic>
<sub>0</sub>) represents the number of transitions from 0 to 1 among the eight neighboring pixels around <italic>P</italic>
<sub>0</sub> when they are considered in a clockwise direction.</p>
<p>The decision criteria are modified such that the product of <italic>P</italic>
<sub>7</sub>, <italic>P</italic>
<sub>1</sub>, and <italic>P</italic>
<sub>5</sub> equals zero as well as the product of <italic>P</italic>
<sub>7</sub>, <italic>P</italic>
<sub>3</sub>, and <italic>P</italic>
<sub>5</sub> equals zero. Following the establishment of these conditions, a subsequent assessment is undertaken to identify and subsequently prune pixels conforming to these established decision metrics. The precise conditions governing these evaluations are delineated in <xref ref-type="disp-formula" rid="eq2">Equation 2</xref>.</p>
<disp-formula id="eq2">
<label>(2)</label>
<mml:math display="block" id="M2">
<mml:mrow>
<mml:mtable>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:mn mathvariant="bold">2</mml:mn>
<mml:mo>&#x2264;</mml:mo>
<mml:mtext mathvariant="bold-italic">N</mml:mtext>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">0</mml:mn>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mo>&#x2264;</mml:mo>
<mml:mn mathvariant="bold">6</mml:mn>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">S</mml:mtext>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">0</mml:mn>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">7</mml:mn>
</mml:msub>
<mml:mo>&#xd7;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:msub>
<mml:mo>&#xd7;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">5</mml:mn>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">0</mml:mn>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">7</mml:mn>
</mml:msub>
<mml:mo>&#xd7;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">3</mml:mn>
</mml:msub>
<mml:mo>&#xd7;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">P</mml:mtext>
<mml:mn mathvariant="bold">5</mml:mn>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">0</mml:mn>
</mml:mrow>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:math>
</disp-formula>
<p>After conducting two successive rounds of condition evaluation, one iteration of the algorithm concludes. This sequence is reiterated persistently until a state is reached where none of the pixel points fulfill the criteria for assessment. This iterative process culminates in the derivation of the skeleton of the target object.</p>
</sec>
<sec id="s2_4_4">
<label>2.4.4</label>
<title>Image processing workflow</title>
<p>Three distinct image sets were randomly sampled from a collection of sixty field maize image groups for experimental analysis. The field maize images, images based on the HSV color space, images processed with the HSV and Otsu algorithms, images of maize stem processed using denoising algorithms, internal gradient algorithms, and skeleton extraction algorithms are presented in <xref ref-type="fig" rid="f8">
<bold>Figure&#xa0;8</bold>
</xref>.</p>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>Image processing process: <bold>(A&#x2013;C)</bold> Field maize images; <bold>(D&#x2013;F)</bold> HSV images; <bold>(G&#x2013;I)</bold> HSV+Otsu images; <bold>(J&#x2013;L)</bold> Denoised images obtained through median filtering, binarization, and morphological opening operations; <bold>(M&#x2013;O)</bold> Maize stem contour images obtained via internal gradient algorithms; <bold>(P&#x2013;R)</bold> Skeleton images obtained through skeleton extraction algorithms.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g008.tif"/>
</fig>
</sec>
<sec id="s2_4_5">
<label>2.4.5</label>
<title>Coordinate extraction and stem diameter measurement</title>
<p>In the maize stem skeleton images, considering that the diameter of the second internode can directly affect the maize&#x2019;s lodging resistance, the second internode of the maize stem has been designated as the area of interest (<xref ref-type="bibr" rid="B41">Zhang et&#xa0;al., 2018</xref>). In the specified region of interest, coordinate extraction in two dimensions is assisted by utilizing the cv2.inRange function from the OpenCV library within a Python environment. The process is fully automated to obviate manual intervention. Initially, a point located on the skeletal line within the defined region of interest is identified and annotated on the image. Subsequently, a horizontal line emanating from this reference point is extended to ascertain the intersection with the contour of maize stem. Concluding this step, the points of intersection are labeled on the image, and the 2D pixel coordinates corresponding to these intersections are meticulously documented. In the region of interest, the extraction procedure is performed three times to confirm the accuracy and consistency of the stem diameter measurements. The process of 2D coordinate extraction is illustrated in <xref ref-type="fig" rid="f9">
<bold>Figure&#xa0;9</bold>
</xref>.</p>
<fig id="f9" position="float">
<label>Figure&#xa0;9</label>
<caption>
<p>2D coordinate extraction process.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g009.tif"/>
</fig>
<p>To transform 2D pixel coordinates into their 3D world coordinates, a synthesis of depth data, intrinsic camera parameters, and the 2D pixel coordinates is essential to achieve the back-projection transformation from pixel to world space. After the back-projection transformation, 3D world coordinates based on the coordinate system of the color flow camera can be obtained. The transformation formula for back-projection is delineated in <xref ref-type="disp-formula" rid="eq3">Equation 3</xref>. The intrinsic parameters characterizing the Intel RealSense D435i camera with a resolution of 848&#xd7;480 are itemized in <xref ref-type="supplementary-material" rid="SM1">
<bold>Supplementary Table&#xa0;1</bold>
</xref>.</p>
<disp-formula id="eq3">
<label>(3)</label>
<mml:math display="block" id="M3">
<mml:mrow>
<mml:mtable>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">Z</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mtext mathvariant="bold-italic">d</mml:mtext>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">X</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">x</mml:mtext>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">c</mml:mtext>
<mml:mtext mathvariant="bold-italic">x</mml:mtext>
</mml:msub>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">f</mml:mtext>
<mml:mtext mathvariant="bold-italic">x</mml:mtext>
</mml:msub>
</mml:mrow>
</mml:mfrac>
<mml:mo>&#xd7;</mml:mo>
<mml:mtext mathvariant="bold-italic">Z</mml:mtext>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">Y</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">y</mml:mtext>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">c</mml:mtext>
<mml:mtext mathvariant="bold-italic">y</mml:mtext>
</mml:msub>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">f</mml:mtext>
<mml:mtext mathvariant="bold-italic">y</mml:mtext>
</mml:msub>
</mml:mrow>
</mml:mfrac>
<mml:mo>&#xd7;</mml:mo>
<mml:mtext mathvariant="bold-italic">Z</mml:mtext>
</mml:mrow>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:math>
</disp-formula>
<p>Here, (<italic>x</italic>,<italic>y</italic>) represent pixel coordinates on the 2D image plane, <italic>d</italic> corresponds to depth information for those pixel points in 3D space, (<italic>c<sub>x</sub>
</italic>,<italic>c<sub>y</sub>
</italic>) correspond to principal point coordinates within camera intrinsic parameters, and <italic>f<sub>x</sub>
</italic> and <italic>f<sub>y</sub>
</italic> denote camera focal lengths along <italic>x</italic> and <italic>y</italic> axes, respectively.</p>
<p>Furthermore, for precise mapping of spatial positions of 3D world coordinates onto the maize stem point cloud, transformation of the 3D world coordinates from the right-handed coordinate system, utilized by OpenCV, to the coordinate system of the color stream camera of the Intel RealSense Viewer is imperative. This necessitates an inversion operation. Thereafter, a rigid transformation is performed to transfer the 3D world coordinates from the color stream camera coordinate system to the depth stream camera coordinate system. Initially, OpenCV employs a right-handed coordinate system by convention, which diverges from the color stream camera coordinate system defined by the Intel RealSense Viewer. Consequently, this research necessitates inverting the y and z-axis values of the 3D world coordinates to conform to the coordinate system defined for the color stream camera. In addition, the Intel RealSense Viewer generates the 3D point cloud of the maize stem using the coordinate system of the depth stream camera, distinct from the coordinate system for the color stream camera that locates the 3D world coordinates. To accurately map 3D world coordinates within the point cloud, this study applies rigid transformation techniques to convert the 3D world coordinates from the color stream camera coordinate system to that of the depth stream camera. Upon transformation, the 3D world coordinates are delineated in red within the point cloud, corroborating the precision of spatial position representation of the method employed to acquire field maize stem diameter using depth data. The algorithm governing rigid transformation is encapsulated in <xref ref-type="disp-formula" rid="eq4">Equation 4</xref>, camera extrinsic parameters are enumerated in <xref ref-type="supplementary-material" rid="SM1">
<bold>Supplementary Table&#xa0;2</bold>
</xref>, and <xref ref-type="fig" rid="f10">
<bold>Figure&#xa0;10</bold>
</xref> illustrates a comparative schematic of the conversion between coordinate systems.</p>
<fig id="f10" position="float">
<label>Figure&#xa0;10</label>
<caption>
<p>3D world coordinates in different coordinate systems: <bold>(A)</bold> 3D world coordinates in the right-handed coordinate system; <bold>(B)</bold> 3D world coordinates in the color stream camera coordinate system; <bold>(C)</bold> 3D world coordinates in the depth stream camera coordinate system.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g010.tif"/>
</fig>
<disp-formula id="eq4">
<label>(4)</label>
<mml:math display="block" id="M4">
<mml:mrow>
<mml:msup>
<mml:mtext mathvariant="bold-italic">p</mml:mtext>
<mml:mo>&#x2032;</mml:mo>
</mml:msup>
<mml:mo>=</mml:mo>
<mml:mtext mathvariant="bold-italic">Rp</mml:mtext>
<mml:mo>+</mml:mo>
<mml:mtext mathvariant="bold-italic">t</mml:mtext>
</mml:mrow>
</mml:math>
</disp-formula>
<p>Here, <italic>R</italic> represents camera rotation matrix, <italic>t</italic> denotes camera translation vector, <italic>p</italic> is the 3D world coordinate in color stream camera coordinate system, and <italic>p&#x2032;</italic> is the 3D world coordinate in depth stream camera coordinate system.</p>
<p>
<xref ref-type="fig" rid="f10">
<bold>Figure&#xa0;10</bold>
</xref> illustrates the process of obtaining stem diameter measurements through the computation of Euclidean distance between pairs of 3D world coordinates. The formula for the Euclidean distance between two points in 3D space is presented as <xref ref-type="disp-formula" rid="eq5">Equation 5</xref>.</p>
<disp-formula id="eq5">
<label>(5)</label>
<mml:math display="block" id="M5">
<mml:mrow>
<mml:mtext mathvariant="bold-italic">d</mml:mtext>
<mml:mo>=</mml:mo>
<mml:msqrt>
<mml:mrow>
<mml:msup>
<mml:mrow>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">x</mml:mtext>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">x</mml:mtext>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:mrow>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msup>
<mml:mo>+</mml:mo>
<mml:msup>
<mml:mrow>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">y</mml:mtext>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">y</mml:mtext>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:mrow>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msup>
<mml:mo>+</mml:mo>
<mml:msup>
<mml:mrow>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">z</mml:mtext>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">z</mml:mtext>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:mrow>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:msqrt>
</mml:mrow>
</mml:math>
</disp-formula>
<p>Here, (<italic>x</italic>
<sub>1</sub>,<italic>y</italic>
<sub>1</sub>,<italic>z</italic>
<sub>1</sub>) and (<italic>x</italic>
<sub>2</sub>,<italic>y</italic>
<sub>2</sub>,<italic>z</italic>
<sub>2</sub>) represent the 3D world coordinates of the two points, respectively, with <italic>d</italic> denoting the distance between them.</p>
<p>In conclusion, the depth data procured from the RGB-D camera has been effectively employed to determine the diameter of maize stems in situ. This method furnishes significant data support for further investigative endeavors.</p>
</sec>
</sec>
<sec id="s2_5">
<label>2.5</label>
<title>Evaluation metrics</title>
<p>To ascertain the accuracy of the method for deriving maize stem diameter measurements <italic>in situ</italic> from depth information, this study executed manual measurements of maize stem diameters using a Vernier caliper and conducted a comparative analysis between these manual measurements and the measurements derived from depth information. The Mean Absolute Percentage Error <italic>(MAPE)</italic>, Mean Absolute Error <italic>(MAE)</italic>, Root Mean Square Error <italic>(RMSE)</italic>, and the coefficient of determination <italic>(R&#xb2;)</italic> serve as metrics to evaluate accuracy. The computational formulas for these indices are delineated in <xref ref-type="disp-formula" rid="eq6">Equations 6</xref>&#x2013;<xref ref-type="disp-formula" rid="eq9">9</xref>.</p>
<disp-formula id="eq6">
<label>(6)</label>
<mml:math display="block" id="M6">
<mml:mrow>
<mml:mtext mathvariant="bold-italic">MAPE</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mn mathvariant="bold">1</mml:mn>
<mml:mtext mathvariant="bold-italic">n</mml:mtext>
</mml:mfrac>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:mrow>
<mml:mtext mathvariant="bold-italic">n</mml:mtext>
</mml:msubsup>
<mml:mfrac>
<mml:mrow>
<mml:mrow>
<mml:mo>|</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">w</mml:mtext>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">k</mml:mtext>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
</mml:msub>
</mml:mrow>
<mml:mo>|</mml:mo>
</mml:mrow>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">k</mml:mtext>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
</mml:msub>
</mml:mrow>
</mml:mfrac>
<mml:mo>&#xd7;</mml:mo>
<mml:mn mathvariant="bold">100</mml:mn>
<mml:mo>%</mml:mo>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula id="eq7">
<label>(7)</label>
<mml:math display="block" id="M7">
<mml:mrow>
<mml:mtext mathvariant="bold-italic">MAE</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mn mathvariant="bold">1</mml:mn>
<mml:mtext mathvariant="bold-italic">n</mml:mtext>
</mml:mfrac>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:mrow>
<mml:mtext mathvariant="bold-italic">n</mml:mtext>
</mml:msubsup>
<mml:mrow>
<mml:mo>|</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">w</mml:mtext>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">k</mml:mtext>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
</mml:msub>
</mml:mrow>
<mml:mo>|</mml:mo>
</mml:mrow>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula id="eq8">
<label>(8)</label>
<mml:math display="block" id="M8">
<mml:mrow>
<mml:mtext mathvariant="bold-italic">RMSE</mml:mtext>
<mml:mo>=</mml:mo>
<mml:msqrt>
<mml:mrow>
<mml:mfrac>
<mml:mn mathvariant="bold">1</mml:mn>
<mml:mtext mathvariant="bold-italic">n</mml:mtext>
</mml:mfrac>
<mml:munderover>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:mrow>
<mml:mtext mathvariant="bold-italic">n</mml:mtext>
</mml:munderover>
<mml:msup>
<mml:mrow>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">w</mml:mtext>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">k</mml:mtext>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:mrow>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:msqrt>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula id="eq9">
<label>(9)</label>
<mml:math display="block" id="M9">
<mml:mrow>
<mml:msup>
<mml:mtext mathvariant="bold-italic">R</mml:mtext>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msup>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">1</mml:mn>
<mml:mo>&#x2212;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:mrow>
<mml:mtext mathvariant="bold-italic">n</mml:mtext>
</mml:msubsup>
<mml:msup>
<mml:mrow>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">k</mml:mtext>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">w</mml:mtext>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:mrow>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msup>
</mml:mrow>
<mml:mrow>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mn mathvariant="bold">1</mml:mn>
</mml:mrow>
<mml:mtext mathvariant="bold-italic">n</mml:mtext>
</mml:msubsup>
<mml:msup>
<mml:mrow>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">k</mml:mtext>
<mml:mtext mathvariant="bold-italic">i</mml:mtext>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:mover accent="true">
<mml:mtext mathvariant="bold-italic">k</mml:mtext>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:mrow>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:math>
</disp-formula>
<p>Here, <italic>n</italic> represents the number of plant samples, <italic>w<sub>i</sub>
</italic> represents the stem diameter measurements based on depth information, <italic>k<sub>i</sub>
</italic> denotes the manually measured values, and <inline-formula>
<mml:math display="inline" id="im1">
<mml:mrow>
<mml:mover accent="true">
<mml:mi>k</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
</mml:mrow>
</mml:math>
</inline-formula> is the average value of the manual measurements of maize stem diameter.</p>
</sec>
</sec>
<sec id="s3" sec-type="results">
<label>3</label>
<title>Results</title>
<sec id="s3_1">
<label>3.1</label>
<title>Error analysis of maize stem diameter measurements based on depth information</title>
<p>A random selection of 60 sets of maize plants was utilized as the experimental material. The maize stem diameters for these sets were obtained based on depth information. The error analysis data comparing stem diameter measurements with manual measurements are shown in <xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref>.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Comparison between stem diameter measurements obtained from depth information and manual measurements.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Number</th>
<th valign="middle" align="center">Measured Stem<break/>Diameter/mm</th>
<th valign="middle" align="center">True Stem<break/>Diameter/mm</th>
<th valign="middle" align="center">Absolute Error/mm</th>
<th valign="middle" align="center">Number</th>
<th valign="middle" align="center">Measured Stem<break/>Diameter/mm</th>
<th valign="middle" align="center">True Stem<break/>Diameter/mm</th>
<th valign="middle" align="center">Absolute Error/mm</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="center">1</td>
<td valign="top" align="center">34.72</td>
<td valign="top" align="center">37.42</td>
<td valign="middle" align="center">2.70</td>
<td valign="middle" align="center">31</td>
<td valign="top" align="center">22.72</td>
<td valign="top" align="center">21.66</td>
<td valign="top" align="center">1.06</td>
</tr>
<tr>
<td valign="top" align="center">2</td>
<td valign="top" align="center">26.48</td>
<td valign="top" align="center">27.06</td>
<td valign="middle" align="center">0.58</td>
<td valign="middle" align="center">32</td>
<td valign="top" align="center">25.64</td>
<td valign="top" align="center">25.55</td>
<td valign="top" align="center">0.09</td>
</tr>
<tr>
<td valign="top" align="center">3</td>
<td valign="top" align="center">31.35</td>
<td valign="top" align="center">32.00</td>
<td valign="middle" align="center">0.65</td>
<td valign="middle" align="center">33</td>
<td valign="top" align="center">20.93</td>
<td valign="top" align="center">20.69</td>
<td valign="top" align="center">0.24</td>
</tr>
<tr>
<td valign="top" align="center">4</td>
<td valign="top" align="center">39.10</td>
<td valign="top" align="center">40.08</td>
<td valign="middle" align="center">0.98</td>
<td valign="middle" align="center">34</td>
<td valign="top" align="center">21.72</td>
<td valign="top" align="center">22.29</td>
<td valign="top" align="center">0.57</td>
</tr>
<tr>
<td valign="top" align="center">5</td>
<td valign="top" align="center">35.76</td>
<td valign="top" align="center">36.07</td>
<td valign="middle" align="center">0.31</td>
<td valign="middle" align="center">35</td>
<td valign="top" align="center">24.12</td>
<td valign="top" align="center">23.89</td>
<td valign="top" align="center">0.23</td>
</tr>
<tr>
<td valign="top" align="center">6</td>
<td valign="top" align="center">22.79</td>
<td valign="top" align="center">26.47</td>
<td valign="middle" align="center">3.68</td>
<td valign="middle" align="center">36</td>
<td valign="top" align="center">22.89</td>
<td valign="top" align="center">21.92</td>
<td valign="top" align="center">0.97</td>
</tr>
<tr>
<td valign="top" align="center">7</td>
<td valign="top" align="center">34.97</td>
<td valign="top" align="center">36.01</td>
<td valign="middle" align="center">1.04</td>
<td valign="middle" align="center">37</td>
<td valign="top" align="center">21.49</td>
<td valign="top" align="center">23.53</td>
<td valign="top" align="center">2.04</td>
</tr>
<tr>
<td valign="top" align="center">8</td>
<td valign="top" align="center">30.12</td>
<td valign="top" align="center">30.91</td>
<td valign="middle" align="center">0.79</td>
<td valign="middle" align="center">38</td>
<td valign="top" align="center">26.74</td>
<td valign="top" align="center">26.34</td>
<td valign="top" align="center">0.40</td>
</tr>
<tr>
<td valign="top" align="center">9</td>
<td valign="top" align="center">29.12</td>
<td valign="top" align="center">30.10</td>
<td valign="middle" align="center">0.98</td>
<td valign="middle" align="center">39</td>
<td valign="top" align="center">20.27</td>
<td valign="top" align="center">20.11</td>
<td valign="top" align="center">0.16</td>
</tr>
<tr>
<td valign="top" align="center">10</td>
<td valign="top" align="center">23.80</td>
<td valign="top" align="center">23.71</td>
<td valign="middle" align="center">0.09</td>
<td valign="middle" align="center">40</td>
<td valign="top" align="center">26.82</td>
<td valign="top" align="center">26.62</td>
<td valign="top" align="center">0.20</td>
</tr>
<tr>
<td valign="top" align="center">11</td>
<td valign="top" align="center">21.88</td>
<td valign="top" align="center">21.92</td>
<td valign="middle" align="center">0.04</td>
<td valign="middle" align="center">41</td>
<td valign="top" align="center">27.66</td>
<td valign="top" align="center">28.15</td>
<td valign="top" align="center">0.49</td>
</tr>
<tr>
<td valign="top" align="center">12</td>
<td valign="top" align="center">32.24</td>
<td valign="top" align="center">31.48</td>
<td valign="middle" align="center">0.76</td>
<td valign="middle" align="center">42</td>
<td valign="top" align="center">21.69</td>
<td valign="top" align="center">21.54</td>
<td valign="top" align="center">0.15</td>
</tr>
<tr>
<td valign="top" align="center">13</td>
<td valign="top" align="center">27.60</td>
<td valign="top" align="center">27.52</td>
<td valign="middle" align="center">0.08</td>
<td valign="middle" align="center">43</td>
<td valign="top" align="center">23.01</td>
<td valign="top" align="center">22.46</td>
<td valign="top" align="center">0.55</td>
</tr>
<tr>
<td valign="top" align="center">14</td>
<td valign="top" align="center">25.16</td>
<td valign="top" align="center">25.90</td>
<td valign="middle" align="center">0.74</td>
<td valign="middle" align="center">44</td>
<td valign="top" align="center">20.25</td>
<td valign="top" align="center">20.49</td>
<td valign="top" align="center">0.24</td>
</tr>
<tr>
<td valign="top" align="center">15</td>
<td valign="top" align="center">30.26</td>
<td valign="top" align="center">29.72</td>
<td valign="middle" align="center">0.54</td>
<td valign="middle" align="center">45</td>
<td valign="top" align="center">21.39</td>
<td valign="top" align="center">22.37</td>
<td valign="top" align="center">0.98</td>
</tr>
<tr>
<td valign="top" align="center">16</td>
<td valign="top" align="center">30.79</td>
<td valign="top" align="center">30.43</td>
<td valign="middle" align="center">0.36</td>
<td valign="middle" align="center">46</td>
<td valign="top" align="center">23.37</td>
<td valign="top" align="center">23.27</td>
<td valign="top" align="center">0.10</td>
</tr>
<tr>
<td valign="top" align="center">17</td>
<td valign="top" align="center">28.21</td>
<td valign="top" align="center">30.76</td>
<td valign="middle" align="center">2.55</td>
<td valign="middle" align="center">47</td>
<td valign="top" align="center">21.34</td>
<td valign="top" align="center">21.58</td>
<td valign="top" align="center">0.24</td>
</tr>
<tr>
<td valign="top" align="center">18</td>
<td valign="top" align="center">39.76</td>
<td valign="top" align="center">40.61</td>
<td valign="middle" align="center">0.85</td>
<td valign="middle" align="center">48</td>
<td valign="top" align="center">26.50</td>
<td valign="top" align="center">24.36</td>
<td valign="top" align="center">2.14</td>
</tr>
<tr>
<td valign="top" align="center">19</td>
<td valign="top" align="center">28.50</td>
<td valign="top" align="center">29.61</td>
<td valign="middle" align="center">1.11</td>
<td valign="middle" align="center">49</td>
<td valign="top" align="center">20.07</td>
<td valign="top" align="center">17.55</td>
<td valign="top" align="center">2.52</td>
</tr>
<tr>
<td valign="top" align="center">20</td>
<td valign="top" align="center">21.48</td>
<td valign="top" align="center">21.67</td>
<td valign="middle" align="center">0.19</td>
<td valign="middle" align="center">50</td>
<td valign="top" align="center">20.05</td>
<td valign="top" align="center">19.38</td>
<td valign="top" align="center">0.67</td>
</tr>
<tr>
<td valign="top" align="center">21</td>
<td valign="top" align="center">27.86</td>
<td valign="top" align="center">28.86</td>
<td valign="middle" align="center">1.00</td>
<td valign="middle" align="center">51</td>
<td valign="top" align="center">17.17</td>
<td valign="top" align="center">17.89</td>
<td valign="top" align="center">0.72</td>
</tr>
<tr>
<td valign="top" align="center">22</td>
<td valign="top" align="center">22.69</td>
<td valign="top" align="center">23.56</td>
<td valign="middle" align="center">0.87</td>
<td valign="middle" align="center">52</td>
<td valign="top" align="center">21.43</td>
<td valign="top" align="center">21.52</td>
<td valign="top" align="center">0.09</td>
</tr>
<tr>
<td valign="top" align="center">23</td>
<td valign="top" align="center">22.41</td>
<td valign="top" align="center">23.47</td>
<td valign="middle" align="center">1.06</td>
<td valign="middle" align="center">53</td>
<td valign="top" align="center">24.82</td>
<td valign="top" align="center">23.35</td>
<td valign="top" align="center">1.47</td>
</tr>
<tr>
<td valign="top" align="center">24</td>
<td valign="top" align="center">24.71</td>
<td valign="top" align="center">24.76</td>
<td valign="middle" align="center">0.05</td>
<td valign="middle" align="center">54</td>
<td valign="top" align="center">24.58</td>
<td valign="top" align="center">22.64</td>
<td valign="top" align="center">1.94</td>
</tr>
<tr>
<td valign="top" align="center">25</td>
<td valign="top" align="center">22.04</td>
<td valign="top" align="center">21.85</td>
<td valign="middle" align="center">0.19</td>
<td valign="middle" align="center">55</td>
<td valign="top" align="center">25.87</td>
<td valign="top" align="center">25.62</td>
<td valign="top" align="center">0.25</td>
</tr>
<tr>
<td valign="top" align="center">26</td>
<td valign="top" align="center">24.91</td>
<td valign="top" align="center">24.51</td>
<td valign="middle" align="center">0.40</td>
<td valign="middle" align="center">56</td>
<td valign="top" align="center">22.81</td>
<td valign="top" align="center">22.60</td>
<td valign="top" align="center">0.21</td>
</tr>
<tr>
<td valign="top" align="center">27</td>
<td valign="top" align="center">25.66</td>
<td valign="top" align="center">26.47</td>
<td valign="middle" align="center">0.81</td>
<td valign="middle" align="center">57</td>
<td valign="top" align="center">21.07</td>
<td valign="top" align="center">20.67</td>
<td valign="top" align="center">0.40</td>
</tr>
<tr>
<td valign="top" align="center">28</td>
<td valign="top" align="center">23.68</td>
<td valign="top" align="center">23.77</td>
<td valign="middle" align="center">0.09</td>
<td valign="middle" align="center">58</td>
<td valign="top" align="center">21.87</td>
<td valign="top" align="center">21.74</td>
<td valign="top" align="center">0.13</td>
</tr>
<tr>
<td valign="top" align="center">29</td>
<td valign="top" align="center">22.42</td>
<td valign="top" align="center">21.24</td>
<td valign="middle" align="center">1.18</td>
<td valign="middle" align="center">59</td>
<td valign="top" align="center">18.46</td>
<td valign="top" align="center">17.49</td>
<td valign="top" align="center">0.97</td>
</tr>
<tr>
<td valign="top" align="center">30</td>
<td valign="top" align="center">21.62</td>
<td valign="top" align="center">21.55</td>
<td valign="middle" align="center">0.07</td>
<td valign="middle" align="center">60</td>
<td valign="top" align="center">19.91</td>
<td valign="top" align="center">19.60</td>
<td valign="top" align="center">0.31</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Analysis of data from <xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref> reveals that the <italic>MAPE</italic> for the sampled set of 60 maize stem diameters is 3.01%, the <italic>MAE</italic> is 0.75mm, and the <italic>RMSE</italic> is 1.07mm. Given that the <italic>MAE</italic> is below 1mm and the <italic>MAPE</italic> does not exceed 3.1%, measurements of maize stem diameters based on depth information are shown to be accurate.</p>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Comparative error analysis of maize stem diameter measurements based on the pinhole imaging principle and the method described in this paper</title>
<p>Previous research has effectively measured the diameter of maize stems in the field utilizing a checkerboard for reference, applying the pinhole imaging principle (<xref ref-type="bibr" rid="B42">Zhou et&#xa0;al., 2023a</xref>). The present study seeks to evaluate the efficacy of measuring maize stem diameters in the field by comparing the pinhole imaging principle with the method proposed herein. Specifically, when the camera captured images of field maize using the method outlined in this paper, images of field maize with a checkerboard were also taken at the same location and angle. These two measurement tasks were completed consecutively within the same day to ensure consistency in experimental conditions. To augment the precision of measurements derived from the pinhole imaging principle, this study introduced enhancements to the experimental apparatus. Specifically, the checkerboard was fixed using a triaxial adjustment arm, which aids in precisely regulating its tilt angle to ensure that the checkerboard is as parallel as possible to the imaging plane of the camera. Images of field maize obtained using the pinhole imaging principle are shown in <xref ref-type="fig" rid="f11">
<bold>Figure&#xa0;11</bold>
</xref>.&#xa0;A total of 60 maize plant samples were selected as experimental material, which are the same set as that used in Section 3.1. The diameters of maize stems from these samples were quantified employing the pinhole imaging principle. The error analysis data comparing stem diameter measurements with manual measurements are presented in <xref ref-type="table" rid="T2">
<bold>Table&#xa0;2</bold>
</xref>.</p>
<fig id="f11" position="float">
<label>Figure&#xa0;11</label>
<caption>
<p>Field maize images obtained based on the pinhole imaging principle.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g011.tif"/>
</fig>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Comparison of stem diameter measurements obtained using the pinhole imaging principle and manual measurements.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Number</th>
<th valign="middle" align="center">Measured Stem<break/>Diameter/mm</th>
<th valign="middle" align="center">True Stem<break/>Diameter/mm</th>
<th valign="middle" align="center">Absolute Error/mm</th>
<th valign="middle" align="center">Number</th>
<th valign="middle" align="center">Measured Stem<break/>Diameter/mm</th>
<th valign="middle" align="center">True Stem<break/>Diameter/mm</th>
<th valign="middle" align="center">Absolute Error/mm</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="center">1</td>
<td valign="top" align="center">34.57</td>
<td valign="top" align="center">37.42</td>
<td valign="top" align="center">2.85</td>
<td valign="middle" align="center">31</td>
<td valign="top" align="center">23.06</td>
<td valign="top" align="center">21.66</td>
<td valign="top" align="center">1.40</td>
</tr>
<tr>
<td valign="top" align="center">2</td>
<td valign="top" align="center">26.05</td>
<td valign="top" align="center">27.06</td>
<td valign="top" align="center">1.01</td>
<td valign="middle" align="center">32</td>
<td valign="top" align="center">26.56</td>
<td valign="top" align="center">25.55</td>
<td valign="top" align="center">1.01</td>
</tr>
<tr>
<td valign="top" align="center">3</td>
<td valign="top" align="center">29.71</td>
<td valign="top" align="center">32.00</td>
<td valign="top" align="center">2.29</td>
<td valign="middle" align="center">33</td>
<td valign="top" align="center">21.94</td>
<td valign="top" align="center">20.69</td>
<td valign="top" align="center">1.25</td>
</tr>
<tr>
<td valign="top" align="center">4</td>
<td valign="top" align="center">41.17</td>
<td valign="top" align="center">40.08</td>
<td valign="top" align="center">1.09</td>
<td valign="middle" align="center">34</td>
<td valign="top" align="center">22.00</td>
<td valign="top" align="center">22.29</td>
<td valign="top" align="center">0.29</td>
</tr>
<tr>
<td valign="top" align="center">5</td>
<td valign="top" align="center">36.92</td>
<td valign="top" align="center">36.07</td>
<td valign="top" align="center">0.85</td>
<td valign="middle" align="center">35</td>
<td valign="top" align="center">26.43</td>
<td valign="top" align="center">23.89</td>
<td valign="top" align="center">2.54</td>
</tr>
<tr>
<td valign="top" align="center">6</td>
<td valign="top" align="center">22.04</td>
<td valign="top" align="center">26.47</td>
<td valign="top" align="center">4.43</td>
<td valign="middle" align="center">36</td>
<td valign="top" align="center">24.47</td>
<td valign="top" align="center">21.92</td>
<td valign="top" align="center">2.55</td>
</tr>
<tr>
<td valign="top" align="center">7</td>
<td valign="top" align="center">32.50</td>
<td valign="top" align="center">36.01</td>
<td valign="top" align="center">3.51</td>
<td valign="middle" align="center">37</td>
<td valign="top" align="center">20.83</td>
<td valign="top" align="center">23.53</td>
<td valign="top" align="center">2.70</td>
</tr>
<tr>
<td valign="top" align="center">8</td>
<td valign="top" align="center">32.05</td>
<td valign="top" align="center">30.91</td>
<td valign="top" align="center">1.14</td>
<td valign="middle" align="center">38</td>
<td valign="top" align="center">28.16</td>
<td valign="top" align="center">26.34</td>
<td valign="top" align="center">1.82</td>
</tr>
<tr>
<td valign="top" align="center">9</td>
<td valign="top" align="center">28.54</td>
<td valign="top" align="center">30.10</td>
<td valign="top" align="center">1.56</td>
<td valign="middle" align="center">39</td>
<td valign="top" align="center">20.28</td>
<td valign="top" align="center">20.11</td>
<td valign="top" align="center">0.17</td>
</tr>
<tr>
<td valign="top" align="center">10</td>
<td valign="top" align="center">24.55</td>
<td valign="top" align="center">23.71</td>
<td valign="top" align="center">0.84</td>
<td valign="middle" align="center">40</td>
<td valign="top" align="center">28.40</td>
<td valign="top" align="center">26.62</td>
<td valign="top" align="center">1.78</td>
</tr>
<tr>
<td valign="top" align="center">11</td>
<td valign="top" align="center">22.80</td>
<td valign="top" align="center">21.92</td>
<td valign="top" align="center">0.88</td>
<td valign="middle" align="center">41</td>
<td valign="top" align="center">30.28</td>
<td valign="top" align="center">28.15</td>
<td valign="top" align="center">2.13</td>
</tr>
<tr>
<td valign="top" align="center">12</td>
<td valign="top" align="center">32.93</td>
<td valign="top" align="center">31.48</td>
<td valign="top" align="center">1.45</td>
<td valign="middle" align="center">42</td>
<td valign="top" align="center">23.25</td>
<td valign="top" align="center">21.54</td>
<td valign="top" align="center">1.71</td>
</tr>
<tr>
<td valign="top" align="center">13</td>
<td valign="top" align="center">30.91</td>
<td valign="top" align="center">27.52</td>
<td valign="top" align="center">3.39</td>
<td valign="middle" align="center">43</td>
<td valign="top" align="center">23.80</td>
<td valign="top" align="center">22.46</td>
<td valign="top" align="center">1.34</td>
</tr>
<tr>
<td valign="top" align="center">14</td>
<td valign="top" align="center">24.47</td>
<td valign="top" align="center">25.90</td>
<td valign="top" align="center">1.43</td>
<td valign="middle" align="center">44</td>
<td valign="top" align="center">19.13</td>
<td valign="top" align="center">20.49</td>
<td valign="top" align="center">1.36</td>
</tr>
<tr>
<td valign="top" align="center">15</td>
<td valign="top" align="center">31.43</td>
<td valign="top" align="center">29.72</td>
<td valign="top" align="center">1.71</td>
<td valign="middle" align="center">45</td>
<td valign="top" align="center">24.75</td>
<td valign="top" align="center">22.37</td>
<td valign="top" align="center">2.38</td>
</tr>
<tr>
<td valign="top" align="center">16</td>
<td valign="top" align="center">29.78</td>
<td valign="top" align="center">30.43</td>
<td valign="top" align="center">0.65</td>
<td valign="middle" align="center">46</td>
<td valign="top" align="center">21.55</td>
<td valign="top" align="center">23.27</td>
<td valign="top" align="center">1.72</td>
</tr>
<tr>
<td valign="top" align="center">17</td>
<td valign="top" align="center">26.39</td>
<td valign="top" align="center">30.76</td>
<td valign="top" align="center">4.37</td>
<td valign="middle" align="center">47</td>
<td valign="top" align="center">25.00</td>
<td valign="top" align="center">21.58</td>
<td valign="top" align="center">3.42</td>
</tr>
<tr>
<td valign="top" align="center">18</td>
<td valign="top" align="center">43.04</td>
<td valign="top" align="center">40.61</td>
<td valign="top" align="center">2.43</td>
<td valign="middle" align="center">48</td>
<td valign="top" align="center">28.75</td>
<td valign="top" align="center">24.36</td>
<td valign="top" align="center">4.39</td>
</tr>
<tr>
<td valign="top" align="center">19</td>
<td valign="top" align="center">25.83</td>
<td valign="top" align="center">29.61</td>
<td valign="top" align="center">3.78</td>
<td valign="middle" align="center">49</td>
<td valign="top" align="center">21.30</td>
<td valign="top" align="center">17.55</td>
<td valign="top" align="center">3.75</td>
</tr>
<tr>
<td valign="top" align="center">20</td>
<td valign="top" align="center">21.19</td>
<td valign="top" align="center">21.67</td>
<td valign="top" align="center">0.48</td>
<td valign="middle" align="center">50</td>
<td valign="top" align="center">20.21</td>
<td valign="top" align="center">19.38</td>
<td valign="top" align="center">0.83</td>
</tr>
<tr>
<td valign="top" align="center">21</td>
<td valign="top" align="center">31.30</td>
<td valign="top" align="center">28.86</td>
<td valign="top" align="center">2.44</td>
<td valign="middle" align="center">51</td>
<td valign="top" align="center">17.27</td>
<td valign="top" align="center">17.89</td>
<td valign="top" align="center">0.62</td>
</tr>
<tr>
<td valign="top" align="center">22</td>
<td valign="top" align="center">23.62</td>
<td valign="top" align="center">23.56</td>
<td valign="top" align="center">0.06</td>
<td valign="middle" align="center">52</td>
<td valign="top" align="center">20.81</td>
<td valign="top" align="center">21.52</td>
<td valign="top" align="center">0.71</td>
</tr>
<tr>
<td valign="top" align="center">23</td>
<td valign="top" align="center">22.22</td>
<td valign="top" align="center">23.47</td>
<td valign="top" align="center">1.25</td>
<td valign="middle" align="center">53</td>
<td valign="top" align="center">25.75</td>
<td valign="top" align="center">23.35</td>
<td valign="top" align="center">2.40</td>
</tr>
<tr>
<td valign="top" align="center">24</td>
<td valign="top" align="center">23.85</td>
<td valign="top" align="center">24.76</td>
<td valign="top" align="center">0.91</td>
<td valign="middle" align="center">54</td>
<td valign="top" align="center">29.29</td>
<td valign="top" align="center">22.64</td>
<td valign="top" align="center">6.65</td>
</tr>
<tr>
<td valign="top" align="center">25</td>
<td valign="top" align="center">20.60</td>
<td valign="top" align="center">21.85</td>
<td valign="top" align="center">1.25</td>
<td valign="middle" align="center">55</td>
<td valign="top" align="center">26.59</td>
<td valign="top" align="center">25.62</td>
<td valign="top" align="center">0.97</td>
</tr>
<tr>
<td valign="top" align="center">26</td>
<td valign="top" align="center">24.64</td>
<td valign="top" align="center">24.51</td>
<td valign="top" align="center">0.13</td>
<td valign="middle" align="center">56</td>
<td valign="top" align="center">23.10</td>
<td valign="top" align="center">22.60</td>
<td valign="top" align="center">0.50</td>
</tr>
<tr>
<td valign="top" align="center">27</td>
<td valign="top" align="center">24.47</td>
<td valign="top" align="center">26.47</td>
<td valign="top" align="center">2.00</td>
<td valign="middle" align="center">57</td>
<td valign="top" align="center">20.00</td>
<td valign="top" align="center">20.67</td>
<td valign="top" align="center">0.67</td>
</tr>
<tr>
<td valign="top" align="center">28</td>
<td valign="top" align="center">22.89</td>
<td valign="top" align="center">23.77</td>
<td valign="top" align="center">0.88</td>
<td valign="middle" align="center">58</td>
<td valign="top" align="center">23.68</td>
<td valign="top" align="center">21.74</td>
<td valign="top" align="center">1.94</td>
</tr>
<tr>
<td valign="top" align="center">29</td>
<td valign="top" align="center">22.95</td>
<td valign="top" align="center">21.24</td>
<td valign="top" align="center">1.71</td>
<td valign="middle" align="center">59</td>
<td valign="top" align="center">19.47</td>
<td valign="top" align="center">17.49</td>
<td valign="top" align="center">1.98</td>
</tr>
<tr>
<td valign="top" align="center">30</td>
<td valign="top" align="center">22.50</td>
<td valign="top" align="center">21.55</td>
<td valign="top" align="center">0.95</td>
<td valign="middle" align="center">60</td>
<td valign="top" align="center">22.25</td>
<td valign="top" align="center">19.60</td>
<td valign="top" align="center">2.65</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>According to the data in <xref ref-type="table" rid="T2">
<bold>Table&#xa0;2</bold>
</xref>, the <italic>MAPE</italic>, <italic>MAE</italic>, and <italic>RMSE</italic> for the 60 sets of maize stem diameter measurements are 7.34%, 1.82mm, and 2.22mm, respectively. A comparative analysis with the errors obtained from the maize stem diameter measurements using the method described in this paper reveals that the values of <italic>MAE</italic>, <italic>MAPE</italic>, and <italic>RMSE</italic> derived from depth information are lower. Specifically, the <italic>MAPE</italic>, <italic>MAE</italic>, and <italic>RMSE</italic> demonstrated reductions of 4.33%, 1.07mm, and 1.15mm, respectively. Given the aforementioned analysis, it is concluded that the precision of field maize stem diameter measurements derived from depth information surpasses that obtained by methods based on the pinhole imaging principle.</p>
<p>To visually illustrate the differences between stem diameter measurements obtained through the pinhole imaging principle and manual measurements, as well as to delineate the variance between measurements derived from the method described in this paper and those obtained manually, this study performed a linear fitting of these datasets. The outcomes of this fitting are depicted in <xref ref-type="fig" rid="f12">
<bold>Figure&#xa0;12</bold>
</xref>.</p>
<fig id="f12" position="float">
<label>Figure&#xa0;12</label>
<caption>
<p>Linear fitting between stem diameter measurements and manual measurements: <bold>(A)</bold> Linear fitting between stem diameter measurements obtained using the pinhole imaging principle and manual measurements. <bold>(B)</bold> Linear fitting between stem diameter measurements obtained from depth information and manual measurements.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g012.tif"/>
</fig>
<p>The linear fit results shown in <xref ref-type="fig" rid="f12">
<bold>Figure&#xa0;12</bold>
</xref> indicate that the <italic>R</italic>&#xb2; for measurements based on the pinhole imaging principle is 0.82, whereas the <italic>R</italic>&#xb2; for measurements based on depth information is 0.96. These findings substantiate the superior precision of the depth information-based method for determining field maize stem diameters over those obtained by the pinhole imaging principle.</p>
<p>Furthermore, to more comprehensively compare the differences between the two measurement methods in terms of precision, stability, and consistency, this study utilized a combination of box plots and scatter plots to display the distribution of stem diameter measurements based on depth information, manual measurement, and the principle of pinhole imaging. On this basis, statistical difference analysis was conducted. Since the measurement results did not conform to a normal distribution, non-parametric Wilcoxon signed-rank tests were used for the difference analysis. The distribution of results is shown in <xref ref-type="fig" rid="f13">
<bold>Figure&#xa0;13</bold>
</xref>. As shown in <xref ref-type="fig" rid="f13">
<bold>Figure&#xa0;13</bold>
</xref>, the comparison between stem diameter values obtained from depth information and those obtained through manual measurement results in a P-value of 0.5005; the comparison between stem diameter values obtained from the principle of pinhole imaging and those obtained through manual measurement results in a P-value of 0.0736. These results do not provide sufficient statistical evidence to suggest a significant difference between the methods of measurement based on depth information or the principle of pinhole imaging and the method of manual measurement.</p>
<fig id="f13" position="float">
<label>Figure&#xa0;13</label>
<caption>
<p>Distribution of stem diameter measurement results based on depth information, manual measurement, and the pinhole imaging principle.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1371252-g013.tif"/>
</fig>
<p>To further compare the consistency between the two measurement methods, this study employed Lin&#x2019;s Concordance Correlation Coefficient (CCC) to analyze the two methods. This coefficient, by comprehensively evaluating the covariance of the measurements and the differences between their respective means, can effectively reflect the consistency between the results of the two measurement methods. The closer the value of CCC is to 1, the better the consistency between the two measurement methods. The calculation formula for the CCC is delineated in <xref ref-type="disp-formula" rid="eq10">Equation 10</xref>.</p>
<disp-formula id="eq10">
<label>(10)</label>
<mml:math display="block" id="M10">
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">&#x3c1;</mml:mtext>
<mml:mtext mathvariant="bold-italic">c</mml:mtext>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mn mathvariant="bold">2</mml:mn>
<mml:mtext mathvariant="bold-italic">&#x3c1;</mml:mtext>
<mml:msub>
<mml:mtext mathvariant="bold-italic">&#x3c3;</mml:mtext>
<mml:mtext mathvariant="bold-italic">x</mml:mtext>
</mml:msub>
<mml:msub>
<mml:mtext mathvariant="bold-italic">&#x3c3;</mml:mtext>
<mml:mtext mathvariant="bold-italic">y</mml:mtext>
</mml:msub>
</mml:mrow>
<mml:mrow>
<mml:msubsup>
<mml:mi mathvariant="bold-italic">&#x3c3;</mml:mi>
<mml:mtext mathvariant="bold-italic">x</mml:mtext>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msubsup>
<mml:mo>+</mml:mo>
<mml:msubsup>
<mml:mi mathvariant="bold-italic">&#x3c3;</mml:mi>
<mml:mtext mathvariant="bold-italic">y</mml:mtext>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msubsup>
<mml:mo>+</mml:mo>
<mml:msup>
<mml:mrow>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mtext mathvariant="bold-italic">&#x3bc;</mml:mtext>
<mml:mtext mathvariant="bold-italic">x</mml:mtext>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mtext mathvariant="bold-italic">&#x3bc;</mml:mtext>
<mml:mtext mathvariant="bold-italic">y</mml:mtext>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:mrow>
<mml:mn mathvariant="bold">2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:math>
</disp-formula>
<p>Here, <italic>&#x3c1;</italic> represents the Pearson correlation coefficient between the two sets of measurements, <italic>&#x3c3;<sub>x</sub>
</italic> and <italic>&#x3c3;<sub>y</sub>
</italic> are the standard deviations of the two sets of measurements, <italic>&#x3bc;<sub>x</sub>
</italic> and <italic>&#x3bc;<sub>y</sub>
</italic> are the means of the two sets of measurements, and <italic>&#x3c1;<sub>c</sub>
</italic> is Lin&#x2019;s Concordance Correlation Coefficient.</p>
<p>Through calculation, it can be determined that the CCC between stem diameter values obtained from depth information and manual measurements is 0.978, while the CCC between stem diameter values obtained from the principle of pinhole imaging and manual measurements is 0.909. These results indicate that, compared to the principle of pinhole imaging, the measurement method based on depth information shows a closer alignment with manual measurement results, demonstrating better consistency.</p>
<p>Furthermore, as indicated by the box plots in <xref ref-type="fig" rid="f13">
<bold>Figure&#xa0;13</bold>
</xref>, the distribution widths for maize stem diameters measured using depth information, manual techniques, and the pinhole imaging principle are 18.59mm, 18.58mm, and 19.65mm, respectively. The interquartile ranges are 5.78mm, 6.17mm, and 6.52mm, respectively, and the medians are 23.74mm, 23.64mm, and 24.47mm, respectively. In comparison to the pinhole imaging principle, the median values of maize stem diameters measured using depth information more closely align with those obtained by manual measurement, further validating its advantage in precision. Additionally, the distribution width and interquartile range of maize stem diameters gathered from depth information are also closer to those from manual measurements, indicating its superior performance in terms of stability and consistency. In summary, from the perspectives of accuracy, stability, and consistency, the method of acquiring field maize stem diameters based on depth information has demonstrated superior performance.</p>
</sec>
</sec>
<sec id="s4" sec-type="discussion">
<label>4</label>
<title>Discussion</title>
<p>In response to the constraints presented by conventional, laborious phenotypic measurements in agronomic research, this study proposes an innovative method for the quantification of maize stem diameter <italic>in situ</italic> employing depth information from an RGB-D camera. RGB images, depth maps, and 3D point clouds of maize stems in the field were captured using an Intel RealSense D435i camera. An effective solution for the precise alignment of RGB and depth images is provided by the rs.align class within the pyrealsense2 library. Furthermore, the automation of acquiring 2D pixel coordinates is enhanced by utilizing a skeleton extraction algorithm based on thinning techniques. The integration of depth information with intrinsic parameters of the camera enables the transformation of 2D pixel coordinates into 3D world coordinates through a back-projection transformation. Subsequently, through rigid transformation techniques, these 3D world coordinates are precisely mapped onto the 3D point cloud. In conclusion, the quantification of maize stem diameter was accomplished by computing the Euclidean distance between pairs of 3D world coordinates. The empirical outcomes substantiated the precision, reliability, and uniformity of the proposed method for acquiring field maize stem diameters utilizing depth information derived from an RGB-D camera.</p>
<p>Relative to analogous technologies, the technique for measuring the diameter of maize stems in the field via depth information from an RGB-D camera has exhibited specific advantages. Initially, lidar technology has been demonstrated to be effective for acquiring the diameter of maize stems. <xref ref-type="bibr" rid="B26">Miao et&#xa0;al. (2022)</xref> collected 3D point cloud data of maize across extensive fields employing terrestrial laser scanning and obtained the stem diameters by applying elliptical fitting techniques, achieving an <italic>R</italic>&#xb2; in excess of 0.8. <xref ref-type="bibr" rid="B24">Ma et&#xa0;al. (2019)</xref> employed a handheld lidar to obtain the diameter of potted maize stems, achieving an <italic>R</italic>&#xb2; of 0.89. Nonetheless, given that lidar technology utilizes laser beams to gauge object surface distances, its utility is primarily confined to the acquisition of 3D point cloud data of maize plants, with an inherent limitation in gathering chromatic information. Moreover, the production and maintenance costs associated with this technology are considerable. In contrast, the Intel RealSense D435i camera employed in this research possesses the capability to concurrently capture color imagery, depth maps, and 3D point clouds of maize stems. Color data play a pivotal role in aiding researchers to diagnose crop diseases and infestations (<xref ref-type="bibr" rid="B10">Deng et&#xa0;al., 2020</xref>). Additionally, this camera is not only economical and portable but also amenable to further development. Additionally, cameras based on the TOF principle can also be employed to measure the diameter of maize stems. <xref ref-type="bibr" rid="B7">Chaivivatrakul et&#xa0;al. (2014)</xref> utilized a TOF camera to collect 3D point cloud data of indoor potted maize and successfully extracted the stem diameter using elliptical fitting techniques, with an <italic>R</italic>&#xb2; of 0.84. <xref ref-type="bibr" rid="B4">Bao et&#xa0;al. 
(2019)</xref> captured 3D point cloud data for maize in field conditions utilizing a side-view TOF camera and extracted the stem diameter through a method based on 3D skeletal lines. However, the <italic>R</italic>&#xb2; was a mere 0.27, indicating lower precision. The observed discrepancy in accuracy between the two referenced studies may be ascribed to the inherent resolution constraints of the TOF camera coupled with its pronounced susceptibility to ambient natural light, culminating in suboptimal measurements within outdoor settings (<xref ref-type="bibr" rid="B18">Kazmi et&#xa0;al., 2014</xref>). Compared to the time-of-flight imaging technology of TOF cameras, the Intel RealSense D435i camera employs stereo vision technology, which enables it to provide more robust high-resolution depth data in outdoor environments (<xref ref-type="bibr" rid="B35">Vit and Shani, 2018</xref>). Conclusively, contact measurement techniques are also viable for determining the diameter of maize stems. <xref ref-type="bibr" rid="B2">Atefi et&#xa0;al. (2020)</xref> utilized a robotic system fitted with fixtures to measure the diameters of maize and sorghum stems under controlled laboratory conditions, yielding <italic>R</italic>&#xb2; values of 0.98 and 0.99, respectively. Such precision underscores the high accuracy of the measurement methods. Nevertheless, contact measurement methods require a high level of operator skill, and any mishandling might inflict damage on the maize stems. By contrast, this study utilizes non-invasive imaging technologies for the measurement of maize stem diameters, a method that obviates the need for physical contact with the stems and consequently mitigates the risk of damage to the crops.</p>
<p>In the complex field environment, developing an imaging system that can adapt to diverse environmental factors has always been a scientific challenge. Although RGB-D cameras based on depth information have successfully acquired the diameter of field maize stems to a certain extent, they also present some issues that require further investigation. The principal challenge encountered in field phenotyping is the substantial effect of ambient illumination on image quality. The Intel RealSense D435i camera, amongst a range of RGB-D imaging devices, manifests reduced sensitivity to light variation. Nevertheless, its operational performance can be compromised under the intense illumination characteristic of peak midday sun (<xref ref-type="bibr" rid="B35">Vit and Shani, 2018</xref>). Future research will employ near-infrared filters to optimize camera performance in bright light conditions (<xref ref-type="bibr" rid="B12">Gai et&#xa0;al., 2015</xref>; <xref ref-type="bibr" rid="B15">He et&#xa0;al., 2021</xref>). Additionally, the current data collection is limited to clear weather conditions and does not encompass overcast conditions. Therefore, future research will consider data collection under various weather conditions to more comprehensively evaluate the applicability and robustness of the method presented in this paper. In addition, this study was conducted using a relatively sparse planting pattern, which, to some extent, reduced the interference from adjacent plants on the experiments. However, it also lacked observation of plants under conventional planting patterns. Therefore, future research will continue to optimize the experimental design, thereby exploring the applicability of the methods presented in this paper under conventional planting patterns. 
Moreover, while side-view imaging technology facilitates the acquisition of the 3D morphology and diameter of maize stems within field conditions, the method of collection from a single angle makes it difficult to present a comprehensive 3D phenotype of the maize stems. Consequently, the pursuit of a method that yields a more holistic 3D phenotype of maize stems will become one of the important directions for future research. Finally, depth information based on RGB-D cameras has proven effective for determining the diameter of maize stems under open field conditions. However, the generalizability of this approach to other crops necessitates additional experimental validation.</p>
</sec>
<sec id="s5" sec-type="conclusions">
<label>5</label>
<title>Conclusion</title>
<p>This study proposes a method for acquiring the diameter of maize stems in the field based on depth information from RGB-D cameras. Initially, the contour of the maize stems was obtained through 2D image processing techniques. Subsequently, a skeleton extraction algorithm based on thinning techniques was employed to assist in the acquisition of 2D pixel coordinates. Furthermore, back-projection transformation and rigid transformation techniques were applied to convert 2D pixel coordinates into 3D world coordinates, which were then mapped onto a 3D point cloud. Lastly, the Euclidean distance was applied to calculate the diameter of maize stems, resulting in a <italic>MAPE</italic> of 3.01%, an <italic>MAE</italic> of 0.75mm, an <italic>RMSE</italic> of 1.07mm, and an <italic>R</italic>&#xb2; of 0.96. Compared with measurement methods based on the pinhole imaging principle, there was a reduction in the <italic>MAE</italic>, <italic>MAPE</italic>, and <italic>RMSE</italic> by 1.07mm, 4.33%, and 1.15mm, respectively. Concurrently, there was an increase of 0.14 in the <italic>R</italic>&#xb2;. The method of acquiring the diameter of field maize stems using depth information from RGB-D cameras maintains the <italic>MAE</italic> within 1.1mm and the <italic>MAPE</italic> within 3.1%, enabling accurate measurement of maize stem diameter. Additionally, this method utilizes non-invasive imaging technology that not only ensures measurement accuracy but also precludes damage to crop surfaces, presenting the possibility to supplant Vernier calipers for monitoring phenotypes of field maize. In the future, should this method be broadly adopted for phenotypic monitoring across diverse crop species, it has the potential to markedly diminish the time and labor required for manual measurements, thereby providing strong technical support for agricultural modernization and precision agriculture.</p>
</sec>
<sec id="s6" sec-type="data-availability">
<title>Data availability statement</title>
<p>The datasets presented in this study can be found in online repositories. The names of the repository/repositories and accession number(s) can be found below: <uri xlink:href="http://dx.doi.org/10.6084/m9.figshare.25450039">http://dx.doi.org/10.6084/m9.figshare.25450039</uri>.</p>
</sec>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>JZho: Conceptualization, Funding acquisition, Methodology, Writing &#x2013; original draft. MC: Data curation, Formal Analysis, Methodology, Writing &#x2013; original draft. YW: Formal Analysis, Methodology, Visualization, Writing &#x2013; review &amp; editing. YG: Methodology, Validation, Writing &#x2013; review &amp; editing. YT: Data curation, Formal Analysis, Writing &#x2013; review &amp; editing. BJ: Investigation, Writing &#x2013; review &amp; editing. MW: Project administration, Writing &#x2013; review &amp; editing. JZha: Supervision, Writing &#x2013; review &amp; editing. LH: Formal Analysis, Supervision, Writing &#x2013; original draft.</p>
</sec>
</body>
<back>
<sec id="s8" sec-type="funding-information">
<title>Funding</title>
<p>The author(s) declare that financial support was received for the research, authorship, and/or publication of this article. This study was supported by the National Key R&amp;D Program of China (2022YFD2001602), the Jilin Provincial Department of science and technology (20230202042NC) and the National Innovation and Entrepreneurship Training Project for University (China) (202310193065).</p>
</sec>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s10" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec id="s11" sec-type="supplementary-material">
<title>Supplementary material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fpls.2024.1371252/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fpls.2024.1371252/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Table_1.docx" id="SM1" mimetype="application/vnd.openxmlformats-officedocument.wordprocessingml.document"/>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Arief</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Nugroho</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Putro</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Dananta</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Masithoh</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Sutiarso</surname> <given-names>L.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). &#x201c;<article-title>Three-dimensional (3D) reconstruction for non-destructive plant growth observation system using close-range photogrammetry method</article-title>,&#x201d; in <source>IOP Conference Series: Earth and Environmental Science</source> (<publisher-name>IOP Publishing</publisher-name>, <publisher-loc>Malang, Indonesia</publisher-loc>), <fpage>012028</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1088/1755-1315/733/1/012028</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Atefi</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Ge</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Pitla</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Schnable</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Robotic detection and grasp of maize and sorghum: stem measurement with contact</article-title>. <source>Robotics</source> <volume>9</volume>, <elocation-id>58</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/robotics9030058</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Baharav</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Bariya</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Zakhor</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>
<italic>In situ</italic> height and width estimation of sorghum plants from 2.5d infrared images</article-title>. <source>Electronic Imaging.</source> <volume>2017</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.2352/ISSN.2470-1173.2017.17.COIMG-435</pub-id>
</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Tang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Srinivasan</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Schnable</surname> <given-names>P. S.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Field-based architectural traits characterisation of maize plant using time-of-flight 3D imaging</article-title>. <source>Biosyst. Eng.</source> <volume>178</volume>, <fpage>86</fpage>&#x2013;<lpage>101</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2018.11.005</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Batz</surname> <given-names>J.</given-names>
</name>
<name>
<surname>M&#xe9;ndez-Dorado</surname> <given-names>M. A.</given-names>
</name>
<name>
<surname>Thomasson</surname> <given-names>J. A.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Imaging for high-throughput phenotyping in energy sorghum</article-title>. <source>J. Imaging.</source> <volume>2</volume>, <elocation-id>4</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/jimaging2010004</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bothast</surname> <given-names>R. J.</given-names>
</name>
<name>
<surname>Schlicher</surname> <given-names>M. A.</given-names>
</name>
</person-group> (<year>2005</year>). <article-title>Biotechnological processes for conversion of corn into ethanol</article-title>. <source>Appl. Microbiol. Biotechnol.</source> <volume>67</volume>, <fpage>19</fpage>&#x2013;<lpage>25</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00253-004-1819-8</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chaivivatrakul</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Tang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Dailey</surname> <given-names>M. N.</given-names>
</name>
<name>
<surname>Nakarmi</surname> <given-names>A. D.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Automatic morphological trait characterization for corn plants via 3D holographic reconstruction</article-title>. <source>Comput. Electron. Agric.</source> <volume>109</volume>, <fpage>109</fpage>&#x2013;<lpage>123</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2014.09.005</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Chen</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Drechsler</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Laura</surname> <given-names>C. O.</given-names>
</name>
</person-group> (<year>2011</year>). &#x201c;<article-title>A thinning-based liver vessel skeletonization method</article-title>,&#x201d; in <conf-name>2011 International Conference on Internet Computing and Information Services</conf-name>, <conf-loc>Hong Kong, China</conf-loc>. <fpage>152</fpage>&#x2013;<lpage>155</lpage> (<publisher-loc>New York, New York</publisher-loc>: <publisher-name>IEEE</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ICICIS.2011.44</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chene</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Rousseau</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Lucidarme</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Bertheloot</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Caffier</surname> <given-names>V.</given-names>
</name>
<name>
<surname>Morel</surname> <given-names>P.</given-names>
</name>
<etal/>
</person-group>. (<year>2012</year>). <article-title>On the use of depth camera for 3D phenotyping of entire plants</article-title>. <source>Comput. Electron. Agric.</source> <volume>82</volume>, <fpage>122</fpage>&#x2013;<lpage>127</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2011.12.007</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Deng</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>He</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Dong</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Application of agricultural insect pest detection and control map based on image processing analysis</article-title>. <source>J. Intell. Fuzzy Syst.</source> <volume>38</volume>, <fpage>379</fpage>&#x2013;<lpage>389</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3233/JIFS-179413</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Duvick</surname> <given-names>D. N.</given-names>
</name>
</person-group> (<year>2005</year>). &#x201c;<article-title>The Contribution of Breeding to Yield Advances in maize (Zea mays L.)</article-title>,&#x201d; in <source>Advances in agronomy</source>. Ed. <person-group person-group-type="editor">
<name>
<surname>Sparks</surname> <given-names>D. L.</given-names>
</name>
</person-group>, (<publisher-loc>San Diego, California</publisher-loc>: <publisher-name>Elsevier</publisher-name>) <fpage>83</fpage>&#x2013;<lpage>145</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/S0065-2113(05)86002-X</pub-id>
</citation>
</ref>
<ref id="B12">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Gai</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Tang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Steward</surname> <given-names>B.</given-names>
</name>
</person-group> (<year>2015</year>). &#x201c;<article-title>Plant recognition through the fusion of 2D and 3D images for robotic weeding</article-title>,&#x201d; in <conf-name>2015 ASABE Annual International Meeting</conf-name>, <conf-loc>New Orleans, Louisiana</conf-loc>. <fpage>1</fpage> (<publisher-loc>Saint Joseph, Michigan</publisher-loc>: <publisher-name>American Society of Agricultural and Biological Engineers</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.13031/aim.20152181371</pub-id>
</citation>
</ref>
<ref id="B13">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Grunnet-Jepsen</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Sweetser</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Khuong</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Dorodnicov</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Tong</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Mulla</surname> <given-names>O. E. H.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <source>Intel&#xae; RealSense&#x2122; Self-Calibration for D400 Series Depth Cameras</source> (<publisher-name>California, USA, Intel, White Paper</publisher-name>).</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Grunnet-Jepsen</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Sweetser</surname> <given-names>J. N.</given-names>
</name>
<name>
<surname>Woodfill</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Best-known-methods for tuning intel&#xae; realsense&#x2122; d400 depth cameras for best performance</article-title>. <source>New Technol. Group Intel Corporation</source>. Rev 1.9.</citation>
</ref>
<ref id="B15">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>He</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Dong</surname> <given-names>Q.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). &#x201c;<article-title>Fast-dynamic-vision: Detection and tracking dynamic objects with event and depth sensing</article-title>,&#x201d; in <conf-name>2021 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</conf-name>, <conf-loc>Prague, Czech Republic</conf-loc>. <fpage>3071</fpage>&#x2013;<lpage>3078</lpage> (<publisher-loc>New York, New York</publisher-loc>: <publisher-name>IEEE</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1109/IROS51168.2021.9636448</pub-id>
</citation>
</ref>
<ref id="B16">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Jin</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Saha</surname> <given-names>P. K.</given-names>
</name>
</person-group> (<year>2013</year>). &#x201c;<article-title>A new fuzzy skeletonization algorithm and its applications to medical imaging</article-title>,&#x201d; in <conf-name>Image Analysis and Processing&#x2013;ICIAP 2013: 17th International Conference</conf-name>, <conf-loc>Naples, Italy</conf-loc>, <conf-date>September 9-13, 2013</conf-date>. <fpage>662</fpage>&#x2013;<lpage>671</lpage> (<publisher-loc>Berlin and Heidelberg</publisher-loc>: <publisher-name>Springer, Naples, Italy</publisher-name>), Proceedings, Part I 17. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-3-642-41181-6_67</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jin</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Kim</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>A 3D skeletonization algorithm for 3D mesh models using a partial parallel 3D thinning algorithm and 3D skeleton correcting algorithm</article-title>. <source>Appl. Sci.</source> <volume>7</volume>, <elocation-id>139</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/app7020139</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kazmi</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Foix</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Alenya</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Andersen</surname> <given-names>H. J.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Indoor and outdoor depth imaging of leaves with time-of-flight and stereo vision sensors: Analysis and comparison</article-title>. <source>ISPRS J. Photogramm. Remote Sens.</source> <volume>88</volume>, <fpage>128</fpage>&#x2013;<lpage>146</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2013.11.012</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kelly</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Crain</surname> <given-names>J. L.</given-names>
</name>
<name>
<surname>Raun</surname> <given-names>W. R.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>By-plant prediction of corn (Zea mays L.) grain yield using height and stalk diameter</article-title>. <source>Commun. Soil Sci. Plant Anal.</source> <volume>46</volume>, <fpage>564</fpage>&#x2013;<lpage>575</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/00103624.2014.998340</pub-id>
</citation>
</ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wen</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Gu</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Yan</surname> <given-names>H.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>High-throughput phenotyping analysis of maize at the seedling stage using end-to-end segmentation network</article-title>. <source>PloS One</source> <volume>16</volume>, <elocation-id>e0241528</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0241528</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Jin</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Ding</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>C.</given-names>
</name>
<etal/>
</person-group>. (<year>2017</year>). <article-title>Evaluation of seed emergence uniformity of mechanically sown wheat with UAV RGB imagery</article-title>. <source>Remote Sens.</source> <volume>9</volume>, <elocation-id>1241</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs9121241</pub-id>
</citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Shao</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Han</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>He</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yin</surname> <given-names>Z.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Genetic architecture of maize stalk diameter and rind penetrometer resistance in a recombinant inbred line population</article-title>. <source>Genes</source> <volume>13</volume>, <elocation-id>579</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/genes13040579</pub-id>
</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Ye</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Q.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>PocketMaize: an android-smartphone application for maize plant phenotyping</article-title>. <source>Front. Plant Sci.</source> <volume>12</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2021.770217</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ma</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Guan</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Calculation method for phenotypic traits based on the 3D reconstruction of maize canopies</article-title>. <source>Sensors</source> <volume>19</volume>, <fpage>1201</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s19051201</pub-id>
</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Malik</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Qiu</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Tomato segmentation and localization method based on RGB-D camera</article-title>. <source>Int. Agric. Eng. J.</source> <volume>28</volume>, <fpage>49</fpage>.</citation>
</ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Miao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Peng</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Qiu</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Measurement method of maize morphological parameters based on point cloud image conversion</article-title>. <source>Comput. Electron. Agric.</source> <volume>199</volume>, <elocation-id>107174</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2022.107174</pub-id>
</citation>
</ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mousavi</surname> <given-names>S. M. N.</given-names>
</name>
<name>
<surname>Illes</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Bojtor</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Nagy</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>The impact of different nutritional treatments on maize hybrids morphological traits based on stability statistical methods</article-title>. <source>Emir. J. Food Agric.</source> <volume>32</volume>, <fpage>666</fpage>&#x2013;<lpage>672</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.9755/ejfa.2020.v32.i9.2147</pub-id>
</citation>
</ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Nuss</surname> <given-names>E. T.</given-names>
</name>
<name>
<surname>Tanumihardjo</surname> <given-names>S. A.</given-names>
</name>
</person-group> (<year>2010</year>). <article-title>Maize: A paramount staple crop in the context of global nutrition</article-title>. <source>Compr. Rev. Food Sci. Food Saf.</source> <volume>9</volume>, <fpage>417</fpage>&#x2013;<lpage>436</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/j.1541-4337.2010.00117.x</pub-id>
</citation>
</ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Patel</surname> <given-names>K. K.</given-names>
</name>
<name>
<surname>Kar</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Jha</surname> <given-names>S. N.</given-names>
</name>
<name>
<surname>Khan</surname> <given-names>M. A.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Machine vision system: a tool for quality inspection of food and agricultural products</article-title>. <source>J. Food Sci. Technol.</source> <volume>49</volume>, <fpage>123</fpage>&#x2013;<lpage>141</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s13197-011-0321-4</pub-id>
</citation>
</ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Qiao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zheng</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Qu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>T.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>A diameter measurement method of red jujubes trunk based on improved PSPNet</article-title>. <source>Agriculture</source> <volume>12</volume>, <elocation-id>1140</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture12081140</pub-id>
</citation>
</ref>
<ref id="B31">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Qiu</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Miao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Detection of the 3D temperature characteristics of maize under water stress using thermal and RGB-D cameras</article-title>. <source>Comput. Electron. Agric.</source> <volume>191</volume>, <elocation-id>106551</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2021.106551</pub-id>
</citation>
</ref>
<ref id="B32">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Rahimifard</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Sheldrick</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Woolley</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Colwill</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Sachidananda</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2013</year>). &#x201c;<article-title>How to manufacture a sustainable future for 9 billion people in 2050</article-title>,&#x201d; in <conf-name>Re-engineering Manufacturing for Sustainability: Proceedings of the 20th CIRP International Conference on Life Cycle Engineering, Singapore 17-19 April, 2013</conf-name>. <source>Re-engineering Manufacturing for Sustainability</source>, eds. <person-group person-group-type="editor">
<name>
<surname>Nee</surname> <given-names>A. Y. C.</given-names>
</name>
<name>
<surname>Song</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Ong</surname> <given-names>S.-K.</given-names>
</name>
</person-group> (<publisher-loc>Singapore</publisher-loc>: <publisher-name>Springer</publisher-name>), <fpage>1</fpage>&#x2013;<lpage>8</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-981-4451-48-2_1</pub-id>
</citation>
</ref>
<ref id="B33">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shen</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Liang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Tian</surname> <given-names>Z.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Omics-based interdisciplinarity is accelerating plant breeding</article-title>. <source>Curr. Opin. Plant Biol.</source> <volume>66</volume>, <elocation-id>102167</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.pbi.2021.102167</pub-id>
</citation>
</ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Song</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Duan</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Zou</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Fu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Ou</surname> <given-names>Z.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>A three-dimensional reconstruction algorithm for extracting parameters of the banana pseudo-stem</article-title>. <source>Optik</source> <volume>185</volume>, <fpage>486</fpage>&#x2013;<lpage>496</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ijleo.2019.03.125</pub-id>
</citation>
</ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Vit</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Shani</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Comparing RGB-D sensors for close range outdoor agricultural phenotyping</article-title>. <source>Sensors</source> <volume>18</volume>, <elocation-id>4413</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s18124413</pub-id>
</citation>
</ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Managing land carrying capacity: Key to achieving sustainable production systems for food security</article-title>. <source>Land</source> <volume>11</volume>, <elocation-id>484</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/land11040484</pub-id>
</citation>
</ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Non-destructive measurement of three-dimensional plants based on point cloud</article-title>. <source>Plants</source> <volume>9</volume>, <elocation-id>571</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/plants9050571</pub-id>
</citation>
</ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>C.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Size estimation of sweet onions using consumer-grade RGB-depth sensor</article-title>. <source>J. Food Eng.</source> <volume>142</volume>, <fpage>153</fpage>&#x2013;<lpage>162</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jfoodeng.2014.06.019</pub-id>
</citation>
</ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xu</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Tong</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Bie</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>High-throughput measurement system for 3D phenotype of cucumber seedlings using RGB-D camera</article-title>. <source>Trans. Chin. Soc Agric. Mach.</source> <volume>54</volume>, <fpage>204</fpage>&#x2013;<lpage>213+281</lpage>.</citation>
</ref>
<ref id="B40">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Grift</surname> <given-names>T. E.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>A monocular vision-based diameter sensor for Miscanthus giganteus</article-title>. <source>Biosyst. Eng.</source> <volume>111</volume>, <fpage>298</fpage>&#x2013;<lpage>304</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2011.12.007</pub-id>
</citation>
</ref>
<ref id="B41">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zheng</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Ge</surname> <given-names>F.</given-names>
</name>
<etal/>
</person-group>. (<year>2018</year>). <article-title>Multi-locus genome-wide association study reveals the genetic architecture of stalk lodging resistance-related traits in maize</article-title>. <source>Front. Plant Sci.</source> <volume>9</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2018.00611</pub-id>
</citation>
</ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhou</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Cui</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Gao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Tang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Z.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>a). <article-title>Maize (Zea mays L.) stem target region extraction and stem diameter measurement based on an internal gradient algorithm in field conditions</article-title>. <source>Agronomy</source> <volume>13</volume>, <elocation-id>1185</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy13051185</pub-id>
</citation>
</ref>
<ref id="B43">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhou</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Cui</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Gao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Meng</surname> <given-names>K.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>b). <article-title>Maize stem contour extraction and diameter measurement based on adaptive threshold segmentation in field conditions</article-title>. <source>Agriculture</source> <volume>13</volume>, <elocation-id>678</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture13030678</pub-id>
</citation>
</ref>
<ref id="B44">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhou</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Liang</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>B.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>An integrated skeleton extraction and pruning method for spatial recognition of maize seedlings in MGV and UAV remote images</article-title>. <source>IEEE Trans. Geosci. Remote Sens.</source> <volume>56</volume>, <fpage>4618</fpage>&#x2013;<lpage>4632</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TGRS.2018.2830823</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>