<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="review-article" dtd-version="1.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2026.1778541</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Review</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Advancements and prospects in key technologies for robotic pollination in greenhouse pepper breeding: a review</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Kuang</surname><given-names>Minqiu</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3099675/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Li</surname><given-names>Xiaojian</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Xie</surname><given-names>Fangping</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>*</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Zou</surname><given-names>Xuejie</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Xiang</surname><given-names>Yang</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2757724/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Zhang</surname><given-names>Yuxuan</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>*</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3003690/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Liu</surname><given-names>Dawei</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Zou</surname><given-names>Xiangjun</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/903174/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Li</surname><given-names>Xu</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>*</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>School of Mechanical and Electrical Engineering, Hunan Agricultural University</institution>, <city>Changsha</city>,&#xa0;<country country="CN">China</country></aff>
<aff id="aff2"><label>2</label><institution>Foshan Zhongke Agricultural Robotics and Smart Agriculture Innovation Institute</institution>, <city>Foshan</city>,&#xa0;<country country="CN">China</country></aff>
<aff id="aff3"><label>3</label><institution>Hunan Provincial Key Laboratory of Intelligent Agricultural Machinery Equipment</institution>, <city>Changsha</city>,&#xa0;<country country="CN">China</country></aff>
<aff id="aff4"><label>4</label><institution>College of Intelligent Science and Engineering, Beijing University of Agriculture</institution>, <city>Beijing</city>,&#xa0;<country country="CN">China</country></aff>
<aff id="aff5"><label>5</label><institution>Department of Computer and Electrical Engineering, Mid Sweden University</institution>, <city>Sundsvall</city>,&#xa0;<country country="SE">Sweden</country></aff>
<author-notes>
<corresp id="c001"><label>*</label>Correspondence: Fangping Xie, <email xlink:href="mailto:hunanxie2002@163.com">hunanxie2002@163.com</email>; Yuxuan Zhang, <email xlink:href="mailto:yuxuan.zhang@miun.se">yuxuan.zhang@miun.se</email>; Xu Li, <email xlink:href="mailto:leexu@hunau.edu.cn">leexu@hunau.edu.cn</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-27">
<day>27</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2026</year>
</pub-date>
<volume>17</volume>
<elocation-id>1778541</elocation-id>
<history>
<date date-type="received">
<day>31</day>
<month>12</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>10</day>
<month>02</month>
<year>2026</year>
</date>
<date date-type="rev-recd">
<day>04</day>
<month>02</month>
<year>2026</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2026 Kuang, Li, Xie, Zou, Xiang, Zhang, Liu, Zou and Li.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Kuang, Li, Xie, Zou, Xiang, Zhang, Liu, Zou and Li</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-27">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>Robotic pollination represents a pivotal component of smart agriculture, with foundational architectures for target recognition, path planning, and motion control having been progressively established. However, developing an efficient and robust pollination system that integrates perception, decision-making, and execution within real-world scenarios remains confronted with complex challenges. This study systematically reviews recent advancements in the field and distills the core technical issues of greenhouse robotic pollination into three primary domains: target detection and pose estimation, end-effector design, and pollination strategies combined with motion control. Focusing on the visual perception of flowers, actuator architecture, and operational tactics, this review synthesizes existing academic findings to evaluate the state-of-the-art in flower detection and pose estimation, characterize diverse end-effector designs, and analyze the evolutionary trajectory of motion control techniques. Specifically, the analysis encompasses the impact of detection algorithms on recognition accuracy and robustness, the structural classification and performance attributes of pollination mechanisms, and the optimization of control strategies. Furthermore, the study categorizes global research backgrounds, technical methodologies, and paradigmatic system cases, offering a critical evaluation of experiences in constructing automated pollination systems. Despite these advances, current robotic pollination technologies for peppers (chili) face significant bottlenecks characterized by immature methods for precise flower detection and pose estimation, the need for optimized specialized end-effector designs, and insufficient robustness in decision-making systems under dynamic environmental conditions. 
To address these issues, future development should prioritize constructing diverse, large-scale flower image and pose datasets while developing detection algorithms adaptable to complex environments to achieve high-precision identification. Additionally, implementing this system requires a hierarchical architecture where perception drives adaptive actuation. Deep learning models must localize flower targets and assess maturity in real-time, feeding coordinates to path planners that generate collision-free trajectories through foliage. These trajectories are executed via multimodal motion control, synchronizing the rigid manipulator with soft end-effectors. By embedding tactile feedback into the machine learning loop, the system creates a unified sensorimotor framework. This enables dynamic force modulation based on physical resistance, ensuring precise, non-destructive pollination tailored to chili plants.</p>
</abstract>
<kwd-group>
<kwd>artificial intelligence</kwd>
<kwd>breeding</kwd>
<kwd>end effector</kwd>
<kwd>path planning</kwd>
<kwd>pepper</kwd>
<kwd>precision pollination</kwd>
<kwd>visual perception</kwd>
</kwd-group>
<funding-group>
<award-group id="gs1">
<funding-source id="sp1">
<institution-wrap>
<institution>National Key Research and Development Program of China</institution>
<institution-id institution-id-type="doi" vocab="open-funder-registry" vocab-identifier="10.13039/open_funder_registry">10.13039/501100012166</institution-id>
</institution-wrap>
</funding-source>
<award-id rid="sp1">2021YFD1600300-406, 2022YFD2002003-3</award-id>
</award-group>
<award-group id="gs2">
<funding-source id="sp2">
<institution-wrap>
<institution>Key Research and Development Program of Hunan Province of China</institution>
<institution-id institution-id-type="doi" vocab="open-funder-registry" vocab-identifier="10.13039/open_funder_registry">10.13039/501100019091</institution-id>
</institution-wrap>
</funding-source>
<award-id rid="sp2">2025JK2028</award-id>
</award-group>
<award-group id="gs3">
<funding-source id="sp3">
<institution-wrap>
<institution>Hunan Provincial Innovation Foundation for Postgraduate</institution>
<institution-id institution-id-type="doi" vocab="open-funder-registry" vocab-identifier="10.13039/open_funder_registry">10.13039/501100010083</institution-id>
</institution-wrap>
</funding-source>
<award-id rid="sp3">CX20251050</award-id>
</award-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. This project was supported by the National Key R&amp;D Program of China (2021YFD1600300-406; 2022YFD2002003-3), Key R&amp;D Program of Hunan Province (2025JK2028), and the Hunan Provincial Innovation Foundation for Postgraduate (CX20251050).</funding-statement>
</funding-group>
<counts>
<fig-count count="11"/>
<table-count count="3"/>
<equation-count count="0"/>
<ref-count count="103"/>
<page-count count="18"/>
<word-count count="8274"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Technical Advances in Plant Science</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Chili pepper represents a cornerstone of protected agriculture in China, with cultivation spanning a vast geographic expanse from Xinjiang in the west to Shanghai in the east, and extending from Hainan Island in the south to Heilongjiang in the north. As a vegetable crop ranking among the global leaders in both cultivation area and consumption volume (<xref ref-type="bibr" rid="B1">Zou et&#xa0;al., 2020</xref>), the production of peppers has evolved significantly through advanced planting techniques and expanded acreage. Modern facilities, including large-scale greenhouses and solar greenhouses, have gained widespread adoption due to their superior economic efficiency compared to open-field farming. These controlled environments optimize plant growth conditions by regulating temperature and humidity, ensuring frost resistance, and providing optimal illumination, thereby enabling continuous, year-round production (<xref ref-type="bibr" rid="B2">Zhao et&#xa0;al., 2023</xref>).</p>
<p>In the context of agricultural modernization, seeds are often likened to the &#x201c;semiconductors&#x201d; of agriculture; consequently, the quality of pepper seeds is fundamental to enhancing crop quality, augmenting yields, and fostering industrial advancement. Pollination serves as a critical juncture in pepper seed production and fruit development, directly influencing seed formation and overall fruit quality (<xref ref-type="bibr" rid="B3">Lin et&#xa0;al., 2023</xref>). Effective pollination significantly improves fruit set rates and ensures that plants produce a higher quantity of superior fruits under identical growth conditions (<xref ref-type="bibr" rid="B4">Tian et&#xa0;al., 2019</xref>). However, current pollination practices rely predominantly on manual labor, a method fraught with challenges such as high labor intensity, recruitment difficulties, low efficiency, and inconsistent operation quality (<xref ref-type="bibr" rid="B5">Zhang and Zhang, 2015</xref>; <xref ref-type="bibr" rid="B6">Gu et&#xa0;al., 2018</xref>). Therefore, investigating mechanized precision pollination technologies and developing specialized equipment for peppers provides essential technical support for the revitalization of the seed industry and addresses core technological bottlenecks in germplasm resources.</p>
<p>In the broader context of solanaceous crops, <xref ref-type="bibr" rid="B7">Zhang et&#xa0;al. (2024)</xref> conducted an extensive assessment of precision pollination robotics for greenhouse tomatoes to analyze current advancements and prospective trends. Their work scrutinized target recognition technologies for tomato blossoms and evaluated various end-effector configurations (<xref ref-type="bibr" rid="B8">Zhao et&#xa0;al., 2022</xref>). Furthermore, they identified existing obstacles in end-effector research, proposed remedial strategies, and examined the feasibility of liquid spray pollination technology. While this research offers valuable insights for greenhouse tomatoes, chili presents unique requirements. Although peppers are hermaphroditic, self-pollinating crops, they typically rely on wind and insect vectors to facilitate pollen release and transfer in natural settings. In enclosed greenhouse environments, physical barriers such as shade nets and plastic films obstruct these natural pathways, resulting in inadequate pollination that adversely impacts fruit set, yield, and quality (<xref ref-type="bibr" rid="B9">Shen and Hao, 2004</xref>; <xref ref-type="bibr" rid="B10">Lin et&#xa0;al., 2017</xref>). The rapid expansion of greenhouse farming, compounded by rural labor shortages and an aging agricultural workforce, necessitates the broad implementation of artificial intelligence-driven target identification. By facilitating intelligent, automated, and efficient pollination processes, robotic systems mitigate the limitations of manual operation and pave the way for enhanced pepper productivity and quality.</p>
<p>To provide a comprehensive understanding of the latest advancements and future trends in this field, this paper systematically analyzes essential technologies for the precision pollination of facility fruits and vegetables. This review examines the comparative advantages and limitations of various pollination systems, highlights key challenges specific to facility-based pepper pollination, and proposes focused solutions and innovative strategies. The objective is to serve as a foundational reference for researchers in the sector, thereby advancing the evolution of China&#x2019;s facility pepper industry toward more intelligent and efficient practices. <xref ref-type="fig" rid="f1"><bold>Figure&#xa0;1</bold></xref> illustrates the organizational framework of the key mechanical pollination technologies and operational systems discussed herein.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Content organization framework of key mechanical pollination technologies and operational systems.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g001.tif">
<alt-text content-type="machine-generated">Flowchart illustrating key technologies for robotic pollination, covering environmental perception, bionic end-effector design, and manipulator path planning. Subsections detail tasks such as flower detection, bionic end-effector classification, and path planning, each followed by analysis, summary, and evaluation, ultimately supporting the construction of a pepper robotic pollination system, with further steps on perception and agronomy integration.</alt-text>
</graphic></fig>
</sec>
<sec id="s2">
<label>2</label>
<title>Global research advances in flower object detection and pose estimation</title>
<p>In the development of precision pollination systems for greenhouse-cultivated peppers, the accurate detection of flowers and the precise assessment of their orientations constitute the primary and most critical prerequisites (<xref ref-type="bibr" rid="B11">Tang et&#xa0;al., 2012</xref>). However, achieving this in a greenhouse environment presents substantial challenges for the design and motion control of pollination equipment. These difficulties stem from variable lighting conditions, diverse plant morphologies, and the intrinsic characteristics of pepper flowers, which include their diminutive size, dense distribution, severe occlusion by foliage, overlapping blossoms, and varied orientations. The precision of detection and pose estimation directly dictates the accuracy and operational efficiency of the pollination machinery (<xref ref-type="bibr" rid="B12">Zhang et&#xa0;al., 2023</xref>). As illustrated in <xref ref-type="fig" rid="f2"><bold>Figure&#xa0;2</bold></xref>, the complex growth environments of greenhouse crops such as peppers and tomatoes are characterized by diverse flower poses and significant occlusion between flowers and leaves. These factors exacerbate the difficulty for visual recognition systems to identify targets swiftly and precisely (<xref ref-type="bibr" rid="B13">Long et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B14">Chen and Meng, 2025</xref>; <xref ref-type="bibr" rid="B15">Liu et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B16">Kuang et&#xa0;al., 2025a</xref>; <xref ref-type="bibr" rid="B53">2025b</xref>; <xref ref-type="bibr" rid="B54">2025c</xref>). Consequently, achieving efficient and accurate identification of pepper flowers within facility agriculture environments remains a significant bottleneck that necessitates urgent resolution to propel the advancement of robotic precision pollination technology.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Facility pepper cultivation and data collection scenario.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g002.tif">
<alt-text content-type="machine-generated">A young person standing in a greenhouse bends over to take a close-up photograph of green plants growing in pots, with sunlight casting clear shadows and a rural landscape visible outside.</alt-text>
</graphic></fig>
<sec id="s2_1">
<label>2.1</label>
<title>Machine vision-based flower object detection methods</title>
<p>In the nascent stages of flower detection research, methodologies predominantly relied on conventional visual analysis techniques, focusing primarily on chromatic attributes for identification. Image processing techniques, particularly segmentation algorithms, were employed to isolate flower regions from the background based on color patterns and their proportional distribution within the image. However, this approach possesses inherent limitations as it relies exclusively on color as a single dimension. Given the prevalence of similar hues among various floral species in nature, identification based solely on color often lacks precision. To enhance recognition accuracy, it is essential to integrate not only chromatic attributes but also morphological factors, such as flower size, shape, and distinctive textural or structural features. These integrated features establish a comprehensive foundation for recognition, thereby significantly improving the accuracy and robustness of the system.</p>
<p>Several studies illustrate the evolution of these methods. <xref ref-type="bibr" rid="B17">Feng et&#xa0;al. (2013)</xref> introduced a machine vision-based technique for acquiring growth data of flower seedlings, employing binary image segmentation to categorize seedlings as absent, substandard, or superior, achieving an accuracy rate exceeding 87%. However, this method struggled with interference between seedlings and cotyledons, leading to misclassification and compromised detection accuracy. Similarly, <xref ref-type="bibr" rid="B18">Dorj et&#xa0;al. (2013)</xref> developed a computer vision-based binary method to detect and enumerate citrus blossoms under natural lighting for yield estimation. By employing Gaussian filters to mitigate noise and adjust lighting, the algorithm attained an accuracy of 80.55% across all trees. Although 1,340 sub-images of citrus blooms were identified from 21 trees, the approach remained sensitive to illumination variations and noise, resulting in elevated error rates.</p>
<p>In another approach, <xref ref-type="bibr" rid="B19">Yang et&#xa0;al. (2022)</xref> proposed a color template matching system for the non-destructive assessment of Phalaenopsis orchids. By capturing multi-angle images via rotation, the method extracted flower areas to evaluate blooming degree and quality. Despite its intent, the method demonstrated a significant error margin ranging from 0.7% to 64.8% when applied to images from various viewing angles. Furthermore, the requirement to rotate potted plants to capture comprehensive data rendered the detection process complex and limited its practical utility. Conversely, <xref ref-type="bibr" rid="B20">Cui et&#xa0;al. (2019)</xref> innovatively employed an improved K-means clustering algorithm for the efficient segmentation of strawberry images. Combined with morphological processing and connected component analysis, this method accurately delineated flower regions. <xref ref-type="bibr" rid="B21">Ohi et&#xa0;al. (2018)</xref> proposed a hybrid strategy integrating long-range and short-range detection. The long-range phase utilized cameras for initial localization and color classifiers for coarse segmentation to swiftly identify putative floral patches. The short-range phase employed RGB-D cameras with real-time dense Simultaneous Localization and Mapping (SLAM) technology to reconstruct strawberry plants in 3D, achieving an overall recognition accuracy of 78.6%. Nevertheless, this technique remained vulnerable to interference from similar-looking flowers, resulting in suboptimal overall efficacy.</p>
<p>In summary, conventional machine vision techniques are often characterized by low detection accuracy, inadequate resistance to interference, and insufficient robustness. Conversely, deep learning, particularly the application of Convolutional Neural Networks (CNNs) (<xref ref-type="bibr" rid="B22">Krizhevsky et al., 2012</xref>), has significantly enhanced floral recognition accuracy. These algorithms autonomously learn intricate crop characteristics and minimize classification errors while exhibiting remarkable resilience to complex growth conditions, occlusions, and environmental disturbances. Due to advantages in automated feature extraction, precision, robustness, and computational efficiency, deep learning is progressively supplanting traditional machine vision methods in flower recognition applications.</p>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>Method of flower object detection and pose estimation based on machine learning</title>
<p>In contrast to conventional machine vision algorithms, machine learning-based solutions for flower object detection and pose estimation more accurately replicate human recognition patterns. CNNs serve as a pivotal technology for extracting high-level features, enabling the direct derivation of sophisticated semantic information from raw images. This end-to-end perception process obviates the need for laborious manual feature engineering. Consequently, neural networks exhibit remarkable adaptability to intricate and dynamic environments, representing the future direction for developing robust and intelligent vision systems (<xref ref-type="bibr" rid="B23">Paul et&#xa0;al., 2025</xref>). Recent advancements in high-performance computing have further accelerated the deployment of large-scale neural network algorithms on robotic arms, dramatically enhancing their efficacy in object detection and pose estimation. Significant milestones have been achieved in domains including fruit and vegetable flower recognition, spatial localization, and complex phenotypic reconstruction (<xref ref-type="bibr" rid="B23">Paul et al., 2025</xref>; <xref ref-type="bibr" rid="B94">Xiong et al., 2025</xref>; <xref ref-type="bibr" rid="B24">Huang et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B25">Kamata et&#xa0;al., 2018</xref>).</p>
<p>Regarding flower object detection, deep learning techniques exhibit superior robustness and adaptability compared to traditional methods based on color difference, color space, or clustering (<xref ref-type="bibr" rid="B26">Xie et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B27">Qiu et&#xa0;al., 2022</xref>). They successfully surmount obstacles such as vegetation occlusion and lighting fluctuations, thereby surpassing traditional methods in both recognition precision and processing efficiency. Leveraging the end-to-end nature of deep learning streamlines the design and execution of recognition algorithms, which improves deployment efficiency (<xref ref-type="bibr" rid="B28">Peng et&#xa0;al., 2024</xref>). Moreover, by optimizing network architectures and loss functions specific to the growth traits of fruit and vegetable flowers, and by incorporating attention mechanisms, researchers can enhance precision and real-time performance while minimizing computational demands.</p>
<p>For the computation of pollination poses, deploying an end-to-end deep detection network facilitates the identification of safe pollination zones, thereby preventing floral damage or rigid collisions (<xref ref-type="bibr" rid="B30">Eyles et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B31">Ge et&#xa0;al., 2019</xref>). Semantic segmentation and instance segmentation models are utilized to delineate the spatial and sequential data of pollination sites within complex backgrounds, enabling robots to execute precise and efficient pollination strategies (<xref ref-type="bibr" rid="B32">Ishita et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B33">Hao et&#xa0;al., 2023</xref>). Furthermore, geometric correlations between flowers and pedicels, derived from high-level semantic information, provide essential references for the structural design of end-effectors in pollination systems (<xref ref-type="bibr" rid="B34">Gen&#xe9;-Mola et&#xa0;al., 2020</xref>).</p>
<p>In the realm of 3D reconstruction for pollination targets, utilizing advanced feature recognition via deep learning offers distinct advantages over the low-level feature detection of classical machine vision, particularly in improving the efficiency and stability of point cloud registration (<xref ref-type="bibr" rid="B35">Wang et&#xa0;al., 2019</xref>). Deep Generative Adversarial Networks (GANs) have proven effective in recovering missing surfaces in incomplete floral point clouds, thus enhancing reconstruction completeness and precision (<xref ref-type="bibr" rid="B36">Salim et&#xa0;al., 2023</xref>). Furthermore, since deep learning training necessitates substantial sample sizes, high-quality training datasets must encompass diverse perspectives, lighting conditions, scales, fruit shapes, and occlusion scenarios (<xref ref-type="bibr" rid="B37">Deng et&#xa0;al., 2020a</xref>), represented through various modalities including images, point clouds, voxels, and time series. To guarantee that network performance satisfies feature extraction criteria, it is imperative to conduct comprehensive deployment and testing in real-world scenarios (<xref ref-type="bibr" rid="B98">Chu et al., 2025</xref>; <xref ref-type="bibr" rid="B37">Deng et al., 2020</xref>). Beyond basic metrics such as accuracy and real-time performance, evaluating stability and scene adaptability is essential (<xref ref-type="bibr" rid="B38">Fan et&#xa0;al., 2023</xref>). <xref ref-type="fig" rid="f3"><bold>Figure&#xa0;3</bold></xref> demonstrates the efficacy of machine learning in detecting fruit and vegetable flowers and estimating their poses, while <xref ref-type="table" rid="T1"><bold>Table&#xa0;1</bold></xref> provides a comprehensive summary of relevant research data.</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>Schematic of fruit and vegetable flower object detection based on machine learning. <bold>(a)</bold> Monkey peach blossom target detection. <bold>(b)</bold> Detection of different flowering periods in tomatoes. <bold>(c)</bold> Apple blossom target detection. <bold>(d)</bold> Pear blossom object detection. <bold>(e1)</bold> Apple blossom data annotation. <bold>(e2)</bold> Apple flower instance segmentation. <bold>(f)</bold> Citrus flower semantic segmentation. <bold>(g)</bold> Tomato flowering period recognition and posture estimation.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g003.tif">
<alt-text content-type="machine-generated">Compilation of annotated images and graphics showing various machine learning tasks in flower and fruit blossom detection, segmentation, annotation, and analysis for species such as monkey peach, tomato, apple, pear, and citrus, labeled with bounding boxes, segmentation outlines, and probability scores.</alt-text>
</graphic></fig>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Recent research on machine learning-based fruit and vegetable flower object detection and pose estimation.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">Serial number</th>
<th valign="middle" align="center">Authors</th>
<th valign="middle" align="center">For crops</th>
<th valign="middle" align="center">Methods</th>
<th valign="middle" align="center">For tasks</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">1</td>
<td valign="middle" align="center">Deng et&#xa0;al (<xref ref-type="bibr" rid="B37">Deng et&#xa0;al., 2020</xref>)</td>
<td valign="middle" align="center">Citrus flower</td>
<td valign="middle" align="center">Mask R-CNN</td>
<td valign="middle" align="center">instance segmentation</td>
</tr>
<tr>
<td valign="middle" align="center">2</td>
<td valign="middle" align="center">Fan et&#xa0;al (<xref ref-type="bibr" rid="B38">Fan et&#xa0;al., 2023</xref>)</td>
<td valign="middle" align="center">honeysuckle</td>
<td valign="middle" align="center">YOLOv5s+EfficientNet+ CARAFE</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">3</td>
<td valign="middle" align="center">Gong et&#xa0;al (<xref ref-type="bibr" rid="B40">Gong et&#xa0;al., 2023</xref>)</td>
<td valign="middle" align="center">macaque peach</td>
<td valign="middle" align="center">YOLOv5s+C3HB+CCA</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">4</td>
<td valign="middle" align="center">Si et&#xa0;al (<xref ref-type="bibr" rid="B41">Si et&#xa0;al., 2024</xref>)</td>
<td valign="middle" align="center">Apple blossom</td>
<td valign="middle" align="center">YOLOv5s + C-CoTCSP+ RFB+ VariFocal Loss</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">5</td>
<td valign="middle" align="center">Mu et&#xa0;al (<xref ref-type="bibr" rid="B44">Mu et&#xa0;al., 2023</xref>)</td>
<td valign="middle" align="center">Multiple flowers</td>
<td valign="middle" align="center">SSD</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">6</td>
<td valign="middle" align="center">Shang et al (<xref ref-type="bibr" rid="B43">Shang et&#xa0;al., 2022</xref>)</td>
<td valign="middle" align="center">Apple blossom</td>
<td valign="middle" align="center">YOLOv5s</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">7</td>
<td valign="middle" align="center">Shang et&#xa0;al (<xref ref-type="bibr" rid="B43">Shang et&#xa0;al., 2022</xref>)</td>
<td valign="middle" align="center">Apple blossom</td>
<td valign="middle" align="center">YOLOv5s+ ShuffleNetv2 + Ghost Module</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">8</td>
<td valign="middle" align="center">Qiu et&#xa0;al (<xref ref-type="bibr" rid="B27">Qiu et&#xa0;al., 2022</xref>)</td>
<td valign="middle" align="center">Apple blossom</td>
<td valign="middle" align="center">YOLOv4</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">9</td>
<td valign="middle" align="center">Mu et&#xa0;al (<xref ref-type="bibr" rid="B44">Mu et&#xa0;al., 2023</xref>)</td>
<td valign="middle" align="center">Apple blossom</td>
<td valign="middle" align="center">RCNN</td>
<td valign="middle" align="center">semantic segmentation</td>
</tr>
<tr>
<td valign="middle" align="center">10</td>
<td valign="middle" align="center">Chen et&#xa0;al (<xref ref-type="bibr" rid="B45">Chen et&#xa0;al., 2022</xref>)</td>
<td valign="middle" align="center">Tomato flower</td>
<td valign="middle" align="center">YOLOv5s+C2f_ScConv+ LSKA +ADown</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">11</td>
<td valign="middle" align="center">Yue et&#xa0;al (<xref ref-type="bibr" rid="B46">Yue et&#xa0;al., 2024</xref>)</td>
<td valign="middle" align="center">Soybean flower</td>
<td valign="middle" align="center">YOLOv5+CA</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">12</td>
<td valign="middle" align="center">Sebastian et&#xa0;al (<xref ref-type="bibr" rid="B47">Estrada et&#xa0;al., 2024</xref>)</td>
<td valign="middle" align="center">peach blossom</td>
<td valign="middle" align="center">YOLOv7x</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">13</td>
<td valign="middle" align="center">Wang et&#xa0;al (<xref ref-type="bibr" rid="B48">Wang et&#xa0;al., 2022</xref>)</td>
<td valign="middle" align="center">Ihwa</td>
<td valign="middle" align="center">YOLOv4+ SENet + ShuffleNetv2</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">14</td>
<td valign="middle" align="center">Xu et&#xa0;al (<xref ref-type="bibr" rid="B49">Xu et&#xa0;al., 2022</xref>)</td>
<td valign="middle" align="center">Tomato flower</td>
<td valign="middle" align="center">YOLOv3</td>
<td valign="middle" align="center">object detection</td>
</tr>
<tr>
<td valign="middle" align="center">15</td>
<td valign="middle" align="center">Sun et&#xa0;al (<xref ref-type="bibr" rid="B50">Sun et&#xa0;al., 2021</xref>)</td>
<td valign="middle" align="center">Apple, Peach, Pear Blossom</td>
<td valign="middle" align="center">DeepLab-ResNet</td>
<td valign="middle" align="center">semantic segmentation</td>
</tr>
<tr>
<td valign="middle" align="center">16</td>
<td valign="middle" align="center">Yu et&#xa0;al (<xref ref-type="bibr" rid="B51">Yu et&#xa0;al., 2022</xref>)</td>
<td valign="middle" align="center">Tomato flower</td>
<td valign="middle" align="center">YOLOv5s+EfficientNet</td>
<td valign="middle" align="center">attitude estimation</td>
</tr>
<tr>
<td valign="middle" align="center">17</td>
<td valign="middle" align="center">Zhang et&#xa0;al (<xref ref-type="bibr" rid="B7">Zhang et&#xa0;al., 2024</xref>)</td>
<td valign="middle" align="center">tomato</td>
<td valign="middle" align="center">YOLOv5s</td>
<td valign="middle" align="center">attitude estimation</td>
</tr>
<tr>
<td valign="middle" align="center">18</td>
<td valign="middle" align="center">Kuang et&#xa0;al. (<xref ref-type="bibr" rid="B16">Kuang et&#xa0;al., 2025a</xref>; <xref ref-type="bibr" rid="B53">2025b</xref>)</td>
<td valign="middle" align="center">Chili</td>
<td valign="middle" align="center">YOLOv8n+YOLOv5n</td>
<td valign="middle" align="center">object detection</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>Analysis, summary, and evaluation</title>
<p>The emergence of machine learning technologies in the field of agricultural visual perception has precipitated a paradigm shift in object detection applications, particularly for the identification of fruits, vegetables, and flowers. Through the continuous optimization of algorithmic architectures, the implementation of deep, multi-layered network designs, and rigorous training with specialized floral datasets, these methodologies have effectively surmounted challenges that baffled conventional techniques. These challenges include the accurate recognition of variable flower postures and the limitations in precision inherent to traditional image processing. Deep learning models exhibit remarkable robustness, stability, and applicability even when confronted with complex environmental disturbances, including dense foliage occlusion, variable illumination conditions, color overlap, and background noise. By leveraging potent feature extraction capabilities, these networks efficiently address multifaceted issues in agricultural vision and demonstrate significant potential for applications such as robotic pollination.</p>
<p>However, it is imperative to acknowledge that such sophisticated feature extraction mechanisms typically demand extensive annotated datasets and substantial computational power. In the context of facility agriculture, these resources are often constrained. Consequently, researchers are prioritizing the development of lightweight network architectures to alleviate the dependency on massive training data and high-performance computing hardware. Concurrently, advancements in data augmentation techniques are being continuously refined to enhance feature learning efficiency and model parameterization. These innovations facilitate the training of high-quality deep neural networks even under small-sample conditions, which is critical for the practical deployment of deep learning solutions in resource-constrained agricultural environments. In contrast to conventional target detection methods, deep learning-based approaches integrate algorithmic innovations with image preprocessing techniques to thoroughly extract high-dimensional semantic features of chili blossoms, thereby significantly elevating both recognition efficiency and accuracy.</p>
</sec>
</sec>
<sec id="s3">
<label>3</label>
<title>Pollination mechanisms and end-effector design</title>
<sec id="s3_1">
<label>3.1</label>
<title>Domestic and international pollination methods and their characteristics</title>
<p>Vision-based precise perception lays the foundation for efficient physical pollination, and end-effectors, as critical components that translate perceptual data into physical operations, have seen their design innovation and optimization become a central research focus. The rapid advancement of mobile robotics has extended its applicability across multiple dimensions of modern agriculture, establishing intelligent robots as a pivotal catalyst for enhancing agricultural automation. Within this context, pollination robots represent a critical solution, with research increasingly focusing on the innovation and optimization of their end-effector designs.</p>
<p>Current designs for fruit and vegetable pollination end-effectors are diverse, integrating various physical, biological, and chemical methodologies. Physical methods predominantly include spray-based techniques (<xref ref-type="bibr" rid="B57">Zou et&#xa0;al., 2023</xref>), which involve the direct application of pollinating media; vibration-assisted methods (<xref ref-type="bibr" rid="B58">Chechetka et&#xa0;al., 2017</xref>), which utilize mechanical oscillation to facilitate pollen dispersal; and contact-dipping approaches (<xref ref-type="bibr" rid="B59">Wu et&#xa0;al., 2008</xref>), which simulate manual pollination motions to apply pollen with precision. Specifically, spray-based techniques facilitate pollination through the direct application of the medium, vibration-assisted methods augment pollen distribution using mechanical vibrations, and contact-dipping methods replicate manual efforts to promote efficiency through precise application. These physical methods have demonstrated tangible efficacy in real-world applications and are continually refined as technology progresses.</p>
<p>In addition to physical approaches, biological methods may employ specific agents or mechanisms to enhance pollination, such as biomimetic strategies that imitate the natural behaviors of bumblebees (<xref ref-type="bibr" rid="B60">Yao et&#xa0;al., 2020</xref>). Chemical approaches may entail the use of substances designed to stimulate or potentiate the pollination process (<xref ref-type="bibr" rid="B61">Shimizu and Sato, 2018</xref>). These varied methodologies offer distinct technological avenues for improving pollination efficiency and success rates. When developing end-effectors for pollination robots, researchers must evaluate the structural principles and intended applications of these actuators, while considering their conventional use in various fruit and vegetable harvesting contexts. The design and performance specifications are inevitably influenced by the diversity of crop varieties and the complexity of unstructured agricultural environments. Consequently, the design of end-effectors necessitates a comprehensive evaluation of mechanical, electronic, material, and biological factors to ensure efficient, precise, and crop-safe operations. This section details the configuration of end-effectors for six types of facility-based mechanical pollination systems, analyzing and contrasting their operating principles, benefits, and drawbacks. <xref ref-type="table" rid="T2"><bold>Table&#xa0;2</bold></xref> lists mechanical devices for fruit and vegetable pollination, their applicable scenarios, and the crops they are designed for, while <xref ref-type="table" rid="T3"><bold>Table&#xa0;3</bold></xref> provides a comparative examination of these end-effector types.</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Mechanical devices for fruit and vegetable pollination, their applicable scenarios, and the crops they are designed for.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">Pollination device</th>
<th valign="middle" align="left">Applicable scenarios</th>
<th valign="middle" align="left">For crops</th>
<th valign="middle" align="left">End effector structure</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">rotor UAV pollination<break/>(<xref ref-type="bibr" rid="B501">Li et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B65">Zhang et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B58">Chechetka et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B66">Gao et&#xa0;al., 2023</xref>)</td>
<td valign="middle" align="left">field</td>
<td valign="middle" align="left">paddy</td>
<td valign="middle" align="left">pneumatic</td>
</tr>
<tr>
<td valign="middle" align="left">Blackberry Pollination Robot (<xref ref-type="bibr" rid="B21">Ohi et&#xa0;al., 2018</xref>)</td>
<td valign="middle" align="left">greenhouse</td>
<td valign="middle" align="left">blackberry</td>
<td valign="middle" align="left">contact dipping</td>
</tr>
<tr>
<td valign="middle" align="left">mimicry of pollination</td>
<td valign="middle" align="left">greenhouse</td>
<td valign="middle" align="left">tomato</td>
<td valign="middle" align="left">pneumatic</td>
</tr>
<tr>
<td valign="middle" align="left">buttercup pollination (<xref ref-type="bibr" rid="B67">Ding et&#xa0;al., 2014</xref>)</td>
<td valign="middle" align="left">greenhouse</td>
<td valign="middle" align="left">lily</td>
<td valign="middle" align="left">bee mimic</td>
</tr>
<tr>
<td valign="middle" align="left">Ultrasonic Pollination Robot (<xref ref-type="bibr" rid="B63">Li et&#xa0;al., 2014</xref>; <xref ref-type="bibr" rid="B66">Gao et&#xa0;al., 2023</xref>)</td>
<td valign="middle" align="left">greenhouse</td>
<td valign="middle" align="left">strawberry</td>
<td valign="middle" align="left">ultrasonic</td>
</tr>
<tr>
<td valign="middle" align="left">A Robot for Pollination of Kiwifruit (<xref ref-type="bibr" rid="B67">Ding et&#xa0;al., 2014</xref>)</td>
<td valign="middle" align="left">orchard</td>
<td valign="middle" align="left">Kiwifruit</td>
<td valign="middle" align="left">atomizing</td>
</tr>
<tr>
<td valign="middle" align="left">Air-fertilizer for kiwifruit (<xref ref-type="bibr" rid="B69">Kempe and Gils, 2011</xref>; <xref ref-type="bibr" rid="B70">Tacconi et&#xa0;al., 2016</xref>)</td>
<td valign="middle" align="left">orchard</td>
<td valign="middle" align="left">Kiwifruit</td>
<td valign="middle" align="left">pneumatic</td>
</tr>
<tr>
<td valign="middle" align="left">Kiwi Powder Sprayer (<xref ref-type="bibr" rid="B71">Dropcopter, 2022</xref>)</td>
<td valign="middle" align="left">orchard</td>
<td valign="middle" align="left">Kiwifruit</td>
<td valign="middle" align="left">atomizing</td>
</tr>
<tr>
<td valign="middle" align="left">Kiwifruit atomizer (<xref ref-type="bibr" rid="B76">Yuan et&#xa0;al., 2016</xref>)</td>
<td valign="middle" align="left">orchard</td>
<td valign="middle" align="left">Kiwifruit</td>
<td valign="middle" align="left">pneumatic</td>
</tr>
<tr>
<td valign="middle" align="left">Helicopter Pollinator (<xref ref-type="bibr" rid="B73">Yang et&#xa0;al., 2023</xref>)</td>
<td valign="middle" align="left">field</td>
<td valign="middle" align="left">paddy</td>
<td valign="middle" align="left">atomizing</td>
</tr>
<tr>
<td valign="middle" align="left">Tomato Hormone Pollination Robot (<xref ref-type="bibr" rid="B74">Wen et&#xa0;al., 2022</xref>)</td>
<td valign="middle" align="left">greenhouse</td>
<td valign="middle" align="left">tomato</td>
<td valign="middle" align="left">atomizing</td>
</tr>
<tr>
<td valign="middle" align="left">Flower of Forsythia suspensa mechanical arm pollination (<xref ref-type="bibr" rid="B75">Anon, 2022</xref>)</td>
<td valign="middle" align="left">greenhouse</td>
<td valign="middle" align="left">Forsythia suspensa Vahl</td>
<td valign="middle" align="left">contact dipping</td>
</tr>
<tr>
<td valign="middle" align="left">Tomato Pollination Manipulator (<xref ref-type="bibr" rid="B72">Li et&#xa0;al., 2026</xref>)</td>
<td valign="middle" align="left">industrial factory</td>
<td valign="middle" align="left">tomato</td>
<td valign="middle" align="left">atomizing</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="T3" position="float">
<label>Table&#xa0;3</label>
<caption>
<p>Structure of the end effector of the fruit and vegetable pollination robotic arm.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">End effector structure</th>
<th valign="middle" align="center">Principle</th>
<th valign="middle" align="center">Advantage</th>
<th valign="middle" align="center">Disadvantage</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">Airborne (<xref ref-type="bibr" rid="B62">Shimizu et&#xa0;al., 2015</xref>)</td>
<td valign="middle" align="left">By intermittently blowing air into the flowers, vibrations are induced that simulate natural wind or artificial shaking for pollination.</td>
<td valign="middle" align="left">Prevents asynchronous pollination of fruits and vegetables; saves resources and is environmentally friendly</td>
<td valign="middle" align="center">incomplete pollination</td>
</tr>
<tr>
<td valign="middle" align="center">spray type<break/> (<xref ref-type="bibr" rid="B57">Zou et&#xa0;al., 2023</xref>)</td>
<td valign="middle" align="left">Pollen is mixed with water or another medium to form a suspension, which is then sprayed evenly onto the crop flowers using a sprayer, simulating the natural pollination process.</td>
<td valign="middle" align="center">Efficient and labor-saving</td>
<td valign="middle" align="center">Polluting the environment, wasting the powder</td>
</tr>
<tr>
<td valign="middle" align="center">mimicry of bumblebee flight<break/> (<xref ref-type="bibr" rid="B60">Yao et&#xa0;al., 2020</xref>)</td>
<td valign="middle" align="left">The pollination process is accomplished by imitating the buzzing sound produced by a bumblebee&#x2019;s flight when collecting pollen to generate vibrations.</td>
<td valign="middle" align="center">Environmentally friendly, labor-saving</td>
<td valign="middle" align="center">high cost, pollen loss</td>
</tr>
<tr>
<td valign="middle" align="center">Ultrasonic<break/> (<xref ref-type="bibr" rid="B63">Li et&#xa0;al., 2014</xref>)</td>
<td valign="middle" align="left">The process of pollination is completed by the vibration of ultrasonic waves to make the pollen detach from the stamen and spread to the stigma of the pistil.</td>
<td valign="middle" align="center">High pollination efficiency and no flower damage</td>
<td valign="middle" align="center">High cost and easy to damage flowers</td>
</tr>
<tr>
<td valign="middle" align="center">Oscillating<break/> (<xref ref-type="bibr" rid="B56">Wang et&#xa0;al., 2013</xref>)</td>
<td valign="middle" align="left">Pollination is achieved by using a vibrating device at a specific frequency to dislodge pollen from the stamen and disperse it to the stigma of the pistil.</td>
<td valign="middle" align="center">Save labor, work<break/>Wide range, eco-friendly</td>
<td valign="middle" align="center">Uneven pollination may cause flower damage</td>
</tr>
<tr>
<td valign="middle" align="center">Contact dip</td>
<td valign="middle" align="left">The precise pollination method involves transferring pollen to the stigma of the crop plant using physical tools such as brushes or cotton swabs.</td>
<td valign="middle" align="center">Environmentally friendly and highly accurate</td>
<td valign="middle" align="center">High labor intensity, low efficiency</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Categories of pollination execution mechanisms</title>
<p>The safety and efficiency of agricultural pollination depend significantly on the synergy between apiculture and mechanical pollination technology. When selecting pollination methods, it is crucial to consider the biological traits of crops and their natural habitats. While natural wind typically suffices for self-pollinating crops, insect pollination is generally more efficacious for cross-pollinating crops or within confined environments. In the specific context of greenhouse pepper farming, where peppers are self-pollinating vegetables, the primary pollination methods include insects, wind, and manual techniques. Although each method possesses distinct advantages and constraints, all aim to improve pollination efficiency and success rates. Currently, manual pollination remains the predominant technique, whereas automated pollination acts as an essential adjunct when natural conditions are limited. Even under optimal natural conditions, the integration of apiculture and mechanical methods can optimize the process, with economic viability being a primary consideration. Consequently, the research and development of robotic pollination methods for <italic>chili</italic> are imperative. To address diverse pollination requirements, the development of specialized pollination machinery and end-effectors is essential. These devices must adeptly execute numerous functions, including accurately identifying pollen transfer locations, administering pollen by dipping and spreading, and facilitating pollen dissemination. The precise implementation of these actions is vital for guaranteeing superior pollination quality. <xref ref-type="table" rid="T2"><bold>Table&#xa0;2</bold></xref> delineates the research and application contexts for several fruit and vegetable crops regarding their suitable pollination techniques and end-effectors.</p>
<p>As illustrated in <xref ref-type="table" rid="T2"><bold>Table&#xa0;2</bold></xref>, robotic pollination in field crops generally utilizes pneumatic-assisted methods to replicate natural wind forces (<xref ref-type="bibr" rid="B55">Chang et al., 2023</xref>; <xref ref-type="bibr" rid="B68">Suming et al., 2015</xref>). However, for orchard crops, employing rotary-wing aircraft to simulate wind-assisted pollination is often unfeasible due to extensive tree canopies and broad planting intervals. The asynchronous flowering cycles of many fruit tree species further complicate this process. To draw analogies and demonstrate potential technologies that can be adapted for pepper pollination, consider cross-pollinating crops such as apples, pears, and kiwifruits: where ample pollen can be harvested and preserved, air-blowing or atomization methods are typically employed. In greenhouses, where spatial constraints exist, pollination apparatus must be engineered to be compact and miniaturized. <xref ref-type="fig" rid="f4"><bold>Figures&#xa0;4</bold></xref>&#x2013;<xref ref-type="fig" rid="f6"><bold>6</bold></xref> depict machine learning-based solutions for various pollination scenarios and devices for fruit and vegetable blossoms, respectively.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Schematic diagram of the effect of mechanical pollination device for greenhouse crops. <bold>(a)</bold> BlackBerry pollination robot. <bold>(b)</bold> Ultrasonic pollination robots. <bold>(c)</bold> Tomato flower pollination robotic arm. <bold>(d)</bold> Imitating the pollination behavior of male bees. <bold>(e)</bold> Forsythia flower pollination robotic arm. <bold>(f)</bold> A mechanical arm for dipping and pollinating greenhouse tomatoes.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g004.tif">
<alt-text content-type="machine-generated">Six labeled panels show various robotic pollination technologies: a mobile BlackBerry pollination robot in a greenhouse, ultrasonic pollination robots with a laptop controlling them, a robotic arm for tomato flower pollination, a device that mimics the pollination behavior of male bees, a forsythia flower pollination robotic arm with detailed component labeling, and a mechanical arm performing steps of dipping and pollinating greenhouse tomatoes.</alt-text>
</graphic></fig>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Schematic diagram of the effect of the field crop mechanical pollination device. <bold>(a)</bold> Dropper copter drone pollination. <bold>(b)</bold> Rotorcraft for rice pollination in the field.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g005.tif">
<alt-text content-type="machine-generated">Panel a shows a close-up of a multi-rotor drone equipped with pollination mechanisms hovering over a field. Panel b displays a small helicopter-type drone flying above a dense rice field, demonstrating aerial pollination.</alt-text>
</graphic></fig>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>Schematic diagram of the effect of mechanical pollination device for orchard crops. <bold>(a)</bold> Tomato pollinator. <bold>(b)</bold> Handheld pneumatic powder sprayer. <bold>(c)</bold> Mobile kiwi fruit air-circulation powder sprayer. <bold>(d)</bold> Mobile kiwi fruit atomization sprayer.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g006.tif">
<alt-text content-type="machine-generated">a. Close-up of a person using a metal tomato pollinator tool to pollinate tomato flowers on a plant in a garden setting. b. Light green handheld pneumatic powder sprayer with buttons, a trigger, and a small container attached on a dark surface. c. Mobile kiwi fruit air-circulation powder sprayer attached to a vehicle, with multiple hoses and spray nozzles aligned in a row, placed outdoors on grass. d. Mobile kiwi fruit atomization sprayer mounted on a vehicle, spraying mist among kiwi plants in an orchard or field.</alt-text>
</graphic></fig>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Analysis, summary, and evaluation</title>
<p>A comprehensive examination of advancements in domestic and international pollination machinery and end-effector designs reveals several significant findings and future opportunities. Contemporary pollination devices exhibit exceptional ingenuity and adaptability through the integration of physical, biological, and chemical methodologies. The incorporation of deep learning into agricultural visual perception, alongside progress in mechanical pollination technology, has markedly improved the intelligence and precision of pollination robots. Nevertheless, these systems continue to encounter obstacles regarding flexibility, accuracy, and cost-effectiveness, particularly within intricate and dynamic agricultural settings.</p>
<p>Future research should concentrate on augmenting the intelligence of pollination equipment by expanding environmental awareness, optimizing decision-making processes, and upgrading adaptive control systems. Concurrently, it is essential to investigate more efficient and cost-effective pollination technologies, such as the integration of artificial intelligence with natural pollination methods to enhance both efficiency and quality. Moreover, with the advancement of sustainable and precision agriculture, research on pollination machinery must consider environmental impacts and resource efficiency to promote the ecological transformation of agricultural production. Through interdisciplinary collaboration and technological innovation, the field can promote extensive application and widespread adoption of advanced pollination machinery (<xref ref-type="bibr" rid="B64">Jiyu et al., 2017</xref>; <xref ref-type="bibr" rid="B42">Shang et al., 2024</xref>).</p>
</sec>
</sec>
<sec id="s4">
<label>4</label>
<title>Research progress on pollination strategies and motion control</title>
<sec id="s4_1">
<label>4.1</label>
<title>Pollination strategies</title>
<p>An effective and adaptable pollination control strategy is essential for addressing the complex sequence planning required for mobile pollination robots in greenhouse pepper cultivation. By utilizing machine learning algorithms to identify and locate various target flowers and their orientations, it becomes possible to determine the precise centroid of each blossom. Consequently, an optimized pollination strategy can be devised, enabling the generation of optimal trajectory paths (<xref ref-type="bibr" rid="B87">Ni et al., 2025</xref>).</p>
<p><xref ref-type="bibr" rid="B21">Ohi et&#xa0;al. (2018)</xref> developed a robot named BrambleBee to address pollination challenges in thorny vegetation. This system integrated technologies for recognition, trajectory planning, and motion regulation. Building upon BrambleBee, the team subsequently introduced the six-arm pollinating robot Stickbug in 2022, which significantly improved pollination efficiency. Similarly, <xref ref-type="bibr" rid="B76">Yuan et&#xa0;al. (2016)</xref> developed a tomato pollination robot utilizing a four-degree-of-freedom (4-DOF) robotic arm, achieving accurate detection and collision-free motion control. <xref ref-type="bibr" rid="B66">Gao et&#xa0;al. (2023)</xref> engineered a kiwifruit pollination robot featuring an end-effector equipped with a nozzle and a recovery mechanism, which markedly reduced pollen loss. In this context, redundant robotic arms have emerged as a significant area of research due to their proficiency in obstacle avoidance within intricate environments. <xref ref-type="bibr" rid="B77">Baur et&#xa0;al. (2012)</xref> engineered a modular redundant robotic arm capable of versatile mobility. In a 2023 study, <xref ref-type="bibr" rid="B78">Colucci et&#xa0;al. (2023)</xref> decoupled the motion of redundant robots by treating the mobile chassis as a distinct degree of freedom. Furthermore, <xref ref-type="bibr" rid="B79">Schuetz et&#xa0;al. (2015)</xref> proposed a unified planning approach for redundant degrees of freedom in robotic arms, facilitating advancements in obstacle avoidance for complex agricultural settings (<xref ref-type="bibr" rid="B97">Wu and Fang, 2025</xref>; <xref ref-type="bibr" rid="B29">Tao et al., 2024</xref>).</p>
<p>To optimize efficiency and guarantee high success rates, pollination robots must traverse all target flowers via the most direct route. This necessitates the ability to recognize flowers within the visual field, strategically devise efficient paths, and synchronize robotic arm movements. <xref ref-type="bibr" rid="B80">Wei et&#xa0;al. (2024)</xref> introduced an innovative Redundant Cooperative Control (RCC) strategy to address challenges associated with conventional greenhouse tomato pollination, such as high labor costs, environmental concerns regarding chemical use, and fluctuating bee populations. The RCC technique partitions the robot&#x2019;s workspace into subspaces, solves the Traveling Salesman Problem (TSP) for each zone, and synchronizes end-effector and redundant movements to ensure accurate task transitions. As depicted in <xref ref-type="fig" rid="f7"><bold>Figure&#xa0;7</bold></xref>, the methodology encompasses one-dimensional and multi-dimensional RCC techniques, outlining the initial, operational, and final phases of path planning and execution. By leveraging the redundant motion capabilities of both the chassis and the arm, this strategy significantly enhances efficiency. Validation studies on a pollination robot platform demonstrated an average rate of 7.5 seconds per flower, representing a 36.4% improvement over prevalent intermittent strategies.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>RCC pollination strategy and its three stages. <bold>(a)</bold> RCC pollination strategy; <bold>(b)</bold> three stages of RCC pollination strategy, including (a) initial stage, (b) running stage, and (c) final stage.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g007.tif">
<alt-text content-type="machine-generated">Diagram illustrating two pollination strategies for a robot in a workspace; the top section compares the RCC strategy with a moving workspace to an intermittence strategy, while the bottom section presents three stages—initial, running, and final—of the RCC pollination strategy using labeled workspaces, taskspaces, and robotic components with arm and chassis movement directions indicated.</alt-text>
</graphic></fig>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>Path planning and motion control</title>
<p>Motion control is a cornerstone of robotic pollination, incorporating sophisticated technologies including machine learning, path planning, and kinematic control to facilitate autonomous positioning and accurate operation in unstructured environments like greenhouses. This process simulates manual pollination motions to generate robotic arm trajectories and force control profiles, ensuring both operational efficiency and high success rates.</p>
<p><xref ref-type="bibr" rid="B81">Li et&#xa0;al. (2022)</xref> developed a novel ground robot to serve as an autonomous pollinator for kiwifruit orchards, verifying its performance under real field conditions. The study utilized the YOLOv4 algorithm alongside transfer learning to achieve precise identification of kiwifruit flowers and buds (<xref ref-type="bibr" rid="B83">Li et al., 2022a</xref>; <xref ref-type="bibr" rid="B82">2022b</xref>; <xref ref-type="bibr" rid="B3">Lin et al., 2023</xref>; <xref ref-type="bibr" rid="B85">Ahmad et&#xa0;al., 2024</xref>). In subsequent experiments, <xref ref-type="bibr" rid="B93">Li et&#xa0;al. (2025</xref>; <xref ref-type="bibr" rid="B72">2026)</xref> innovatively assessed flower opening orientation via center-of-gravity measurement. This advancement enabled the precise positioning of robotic arms, ensuring accurate alignment with the pistils and petal curves. Conversely, <xref ref-type="bibr" rid="B84">Strader et&#xa0;al. (2019)</xref> developed the BrambleBee unmanned ground vehicle, which focused on specific flower orientations rather than encompassing all potential directional states. To address the need for automated pollination amidst declining bee populations, <xref ref-type="bibr" rid="B502">Khubaib et&#xa0;al. (2021)</xref> introduced a vision-guided servo control technique. As illustrated in <xref ref-type="fig" rid="f8"><bold>Figure&#xa0;8</bold></xref>, this method uses deep learning to detect bloom size and orientation, converting these metrics into depth data to drive a visual servo platform. A six-degree-of-freedom (6-DOF) control system is then utilized to execute the pollination movements with high precision.</p>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>Path planning and obstacle-avoidance motion control of pepper flower pollination manipulator: <bold>(a)</bold> Initial task status; <bold>(b&#x2013;g)</bold> middle states of the robotic arm during the movement process (from stage 1 to 6); <bold>(h)</bold> final state of the robotic arm.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g008.tif">
<alt-text content-type="machine-generated">Sequential photographic series showing a robotic arm interacting with a plant in a pot. Each of eight labeled frames captures different arm positions, progressing from initial status through various middle states to the final state, illustrating the robot’s movement and manipulation task.</alt-text>
</graphic></fig>
<p>Artificial intelligence (AI) and robotics offer substantial prospects for advancing precision agriculture. However, intrinsic crop characteristics, such as floral structures with concave stigmas, limit the application of robots in breeding processes. <xref ref-type="bibr" rid="B88">Xie et&#xa0;al. (2025)</xref> introduced a crop-robot co-design strategy termed &#x201c;Genome Editing and AI Robotics&#x201d; (GEAIR) to overcome this bottleneck. This approach involves creating male-sterile tomato lines with exposed stigmas via genome editing, paired with mobile robots trained for automated stigma recognition. The system, depicted in <xref ref-type="fig" rid="f9"><bold>Figure&#xa0;9</bold></xref>, achieved efficiency comparable to manual techniques in F1 hybrid breeding. This strategy not only validates automated hybrid breeding but also demonstrates the potential of merging AI with gene editing to accelerate the development of climate-resilient crops.</p>
<fig id="f9" position="float">
<label>Figure&#xa0;9</label>
<caption>
<p>Collaborative design strategy of tomato robot pollination based on genome editing and artificial intelligence robot synergy. The left panel illustrates the integrated breeding and pollination pipeline. The right panel presents the robotic system and navigation details: <bold>(A)</bold> key components of the GEAIR robotic pollinator, including (1) carrier, (2) UWB gateways/flag, (3) pollination arm, (4) pollination gripper, (5) camera, (6) pollen brush, and (7) pollen container; <bold>(B, C)</bold> photos of the robot operating in the greenhouse environment; <bold>(D, E)</bold> schematic diagrams of the UWB-based navigation path and greenhouse layout.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g009.tif">
<alt-text content-type="machine-generated">Infographic and diagrams illustrating an AI and robotics-driven breeding process for crops. The left section shows steps including genome editing for flower morphology, use of robot arms and artificial intelligence for cross-pollination, and robotic breeding factories for accelerated breeding. The right panels display a labeled diagram of a GEAIR robotic pollinator (A), photos of the robot operating among potted plants (B, C), a diagram of plant row navigation (D), and a schematic of a greenhouse layout with enlargement on plant rows (E).</alt-text>
</graphic></fig>
</sec>
<sec id="s4_3">
<label>4.3</label>
<title>Analysis, summary and evaluation</title>
<p>The ongoing agricultural technology revolution has introduced innovative and effective pollination systems for fruit trees, such as kiwifruit. Selective flower recognition and liquid pollination technology have emerged as key achievements, markedly improving pollination accuracy and efficiency via precision intermittent spraying tactics (<xref ref-type="bibr" rid="B89">Liu et&#xa0;al., 2023</xref>). Nonetheless, current fruit and vegetable pollination robots encounter obstacles, including their cumbersome dimensions and limitations in seamless integration inside protected agricultural settings. Concurrently, the swift advancement of household spray technology presents viable remedies to this problem (<xref ref-type="bibr" rid="B90">Ma et al., 2025</xref>; <xref ref-type="bibr" rid="B91">Oubounyt et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B92">Ferreira et&#xa0;al., 2023</xref>). <xref ref-type="bibr" rid="B85">Ahmad et&#xa0;al. (2024)</xref> developed accurate and robust pollination methods for watermelons using intelligence-guided visual servoing, which is a sophisticated motion control strategy. <xref ref-type="bibr" rid="B93">Li et&#xa0;al. (2025</xref>; <xref ref-type="bibr" rid="B72">2026)</xref> focused on the design and performance verification of an intelligent pollination robot specifically for facility tomatoes, inherently addressing the motion and control required for the task. <xref ref-type="bibr" rid="B95">Hao et&#xa0;al. (2026)</xref> created a clip-assisted flower detection and wind-compensated precision liquid pollination robot for kiwifruit orchards, directly incorporating a compensation strategy into the motion control system to handle environmental disturbances. Furthermore, <xref ref-type="bibr" rid="B89">Liu et&#xa0;al. 
(2023)</xref> contributed to the upstream perception required for control by researching tomato flower pollination feature recognition based on binocular gray value-deformation coupled template matching, which is essential for guiding the robot&#x2019;s motion. <xref ref-type="bibr" rid="B100">Zhang et&#xa0;al. (2025)</xref> and <xref ref-type="bibr" rid="B99">Akdo&#x11f;an et&#xa0;al. (2025)</xref> provided a comprehensive review of autonomous flower pollination techniques, discussing the progress, challenges, and future directions of various control and strategy methodologies. While many papers concentrate on detection, such as the work of <xref ref-type="bibr" rid="B81">Li et&#xa0;al. (2022)</xref> on deep learning-based kiwifruit flower recognition, and Li G. et&#xa0;al.&#x2019;s real-time detection of kiwifruit flower and bud using YOLOv4, these perception capabilities form the indispensable foundation for the robot&#x2019;s subsequent motion planning and strategic execution during the pollination process. Broader contextual control is also evident (<xref ref-type="bibr" rid="B101">Jiang and Ahamed, 2025</xref>), as seen in the work of <xref ref-type="bibr" rid="B102">Chen and Huang (2025)</xref> and <xref ref-type="bibr" rid="B103">Huang et&#xa0;al. (2025)</xref> on integrating reinforcement learning and large language models for crop production process management optimization and control. Conversely, the pepper sector predominantly depends on conventional hand powder-dipping techniques for pollination, which are ineffective and expensive. To resolve this, we can investigate the integration of kiwifruit&#x2019;s spray pollination theory with manual pollination methods for tomatoes. By examining the distinct development traits and pollination needs of crops such as peppers and tomatoes, we can enhance flower target detection algorithms for improved pollination management. 
The implementation of a variable spraying approach utilizing Redundant Cooperative Control (RCC) at pepper stamens facilitates precise, variable spraying that optimizes pollination while markedly diminishing chemical residues and pollen waste. This method exhibits superior environmental efficacy and ensures accurate pollination for peppers.</p>
</sec>
</sec>
<sec id="s5">
<label>5</label>
<title>Current challenges and solutions</title>
<p>Pollination technology, serving as a pivotal auxiliary innovation facilitating the advancement of simplified facility agriculture, has remained nascent and constrained within China&#x2019;s greenhouse pepper industry. Currently, the majority of regions depend primarily on conventional manual pollination techniques, supplemented by natural vectors such as bees and wind. This reliance is problematic due to seasonal fluctuations, intricate internal facility conditions, and elevated labor costs and intensity, all of which hinder the intelligent transformation of greenhouse pepper pollination. Contemporary research has largely concentrated on accurate flower identification and pose estimation for various fruit and vegetable crops, aiming to improve the detection precision of pollination targets. However, there is a distinct lack of research regarding the design of mechanical pollination end-effectors and control strategies specifically for chili.</p>
<p>Efficient pollination management solutions are required to resolve the sequencing challenges faced by mobile pollination robots in greenhouse pepper cultivation. These strategies must employ machine vision recognition algorithms to identify and locate various target flowers and their orientations, ascertain the centroids of pollination targets, and devise optimal path planning for efficient operation. Furthermore, the design of end-effectors is essential for enhancing operational precision and adaptability. Precise mechanical design and sophisticated control algorithms enable gentle contact and accurate pollination of pepper flowers without inflicting damage. The precision and rationality of these elements directly influence the accuracy and operational efficiency of the pollination machinery.</p>
<p>In the domain of facility agriculture, the exact identification, classification, and pose estimation of chili flower buds, as well as the precise positioning and regulation of pollination mechanisms, constitute significant hurdles in the progression of robotic precision pollination technology (<xref ref-type="bibr" rid="B86">Xu et al., 2023</xref>; <xref ref-type="bibr" rid="B7">Zhang et al., 2024</xref>; <xref ref-type="bibr" rid="B89">Liu et al., 2023</xref>). Investigating precision pollination approaches for chili in controlled agricultural environments possesses substantial practical significance for enhancing seed production technology and offering essential theoretical support. A comprehensive examination of these technologies is crucial for fostering innovation and advancement in chili seed production.</p>
<sec id="s5_1">
<label>5.1</label>
<title>Current challenges</title>
<p>The viability of essential pollination technologies for chili peppers necessitates further investigation and refinement. The pollination process requires exceptional accuracy and sensitivity, encompassing precise flower identification, effective pollen transmission, and successful pollination outcomes. These requirements present considerable hurdles for flower target detection and pose estimation algorithms. Potential difficulties include false positives and missed detections resulting from low recognition accuracy, as well as diminished pollination efficiency attributable to inadequate precision. Furthermore, design deficiencies in recognition algorithms and end-effectors for pollination robotic arms often precipitate a cascade of issues, either directly or indirectly. These issues result in heightened pollen loss during successive pollination events and may inflict unnecessary physical harm to chili plants, thereby negatively impacting yield and quality. Consequently, ongoing enhancement and innovation in flower recognition algorithms and end-effector design are essential for improving overall pollination efficacy and practical application effectiveness. Ultimately, pollination strategies are crucial for the mechanical pollination of chili peppers, as they influence success rates and consistency while directly impacting seed quality and yield. Efficient pollination tactics can optimize path planning, potentially doubling results with reduced effort while conserving time and augmenting total equipment efficacy. Existing major methods for facility-based chili pollination continue to encounter the following obstacles:</p>
<sec id="s5_1_1">
<label>5.1.1</label>
<title>Flower recognition and pose estimation: limited robustness in adverse lighting and occlusion scenarios</title>
<p>Current techniques for detecting fertilized flowers predominantly depend on color image data obtained from binocular depth vision systems. These approaches extract and identify pepper flowers by assessing saturation, hue, chromatic attributes, and dimensional data. They utilize stereo vision technology to ascertain the 3D spatial coordinates of target flowers for positioning. However, in the intricate and variable production environments of contemporary agricultural facilities, existing recognition algorithms for pepper pollination encounter severe constraints under fluctuating lighting and occlusion conditions. This results in generally poor recognition rates (typically around 80% accuracy), frequent false positives, and missed detections, highlighting the necessity for improved robustness. Additionally, the low frame rate of detection processes leads to reduced processing speeds, extending the entire pollination workflow. This inefficiency fails to satisfy contemporary agricultural requirements for efficient and mechanized pollination, thereby limiting extensive adoption in large-scale production.</p>
</sec>
<sec id="s5_1_2">
<label>5.1.2</label>
<title>Waste of pollen resources and operational inefficiency</title>
<p>The efficacy of pepper pollination relies on both precise dosage management and the configuration of the pollination apparatus. Current spray and contact pollination systems partially fulfill the fundamental criteria for intelligent pollination and effectively tackle the issue of asynchronous flowering on the same plant. However, they inadequately regulate pollen application rates, resulting in resource inefficiency. Furthermore, the comparatively slow operation speed of current end-effectors extends the entire procedure, diminishing overall efficiency. Although the initial investment in robotic pollination systems is high, the rising labor costs in rural China and the aging of the agricultural workforce are rapidly reducing the economic viability of manual pollination. Therefore, the long-term adoption of robotic pollination has strategic advantages; by achieving continuous operation and standardized precision, these systems can significantly reduce unit seed production costs and mitigate the risks associated with seasonal labor shortages.</p>
</sec>
<sec id="s5_1_3">
<label>5.1.3</label>
<title>Restricted applicability and lack of versatility</title>
<p>A deficiency in adaptability continues to be a significant obstacle in the advancement of agricultural machinery. Numerous current designs are restricted to specific environmental conditions or crop varieties, thereby constraining the utility of end-effectors. Due to the varied pollination mechanisms and morphological traits of different plants, contemporary pollination robots frequently encounter difficulties in adapting to changes in working conditions or target crops, leading to diminished efficiency or total failure. Pepper pollination robots in facilities encounter comparable constraints in adaptation. Nonetheless, ongoing developments in science and technology are anticipated to yield increasingly versatile pollination robots in the future. These advanced devices will adapt to various working situations and crop needs, providing more efficient and convenient pollination options for agricultural production.</p>
</sec>
<sec id="s5_1_4">
<label>5.1.4</label>
<title>Reliance on manual pollination techniques</title>
<p>Research on intelligent pollination systems for greenhouse peppers in China is presently in its nascent stages. The design of end-effectors primarily concentrates on two categories: spray (pneumatic) systems and contact dip techniques. Despite notable advancements in spray pollination technology across other domestic sectors, this sophisticated method has not been extensively implemented for greenhouse pepper pollination. Pepper pollination robots encounter several technical challenges and practical obstacles in real-world applications, including the need to enhance recognition accuracy, optimize pollination efficiency, improve environmental adaptability, and increase versatility. Consequently, to effectively address the intricate requirements of the current planting environment, it remains essential to persistently enhance research and development initiatives, refine pertinent technologies, and facilitate the maturation and dissemination of mechanical pollination technology for chili peppers.</p>
</sec>
</sec>
<sec id="s5_2">
<label>5.2</label>
<title>Solution strategies and methods</title>
<p>The primary and most critical objective of mechanical pepper pollination is the accurate detection of targets and the estimation of flower poses. Consequently, it is imperative to construct a comprehensive and diverse dataset for pepper flower detection and pose estimation. This data foundation must thoroughly encompass images of pepper blossoms across various growth phases, lighting conditions, angles, and occlusion scenarios to guarantee the dataset&#x2019;s comprehensiveness and representativeness. Based on these dataset characteristics, high-performance target detection algorithms, such as Convolutional Neural Networks (e.g., YOLO) or their enhanced deep learning variants, should be meticulously selected and trained to achieve accurate identification and pose estimation. This approach will elevate detection accuracy and improve the algorithm&#x2019;s adaptability and robustness to complex environmental changes. The proposed device will incorporate several advanced technologies to create a scientifically robust and efficient operational system. It will utilize sophisticated machine learning methodologies for the intelligent recognition and assessment of pepper growth status; implement optimized Redundant Cooperative Control (RCC) pollination strategies to ensure precision and efficiency; integrate advanced path planning algorithms to facilitate autonomous navigation in intricate agricultural settings, avoiding collisions and optimizing pollination routes; and combine high-precision motion control technology to achieve seamless and accurate pollination operations.</p>
<p>Secondly, a customized mechanical pollination apparatus for chili peppers must be designed by amalgamating agricultural technology with crop-specific growth patterns. Despite China&#x2019;s considerable advancements in liquid atomization research through thorough investigation of spray pollination technology, its application in greenhouse pepper cultivation remains nascent. Although spray machinery technology continues to develop, practical application encounters several challenges, including structural incompatibility with pepper flower anatomy, incongruity between spray droplet size and pollination needs, and difficulties in achieving precise timing control. These constraints impede the extensive implementation of spray technology in liquid pollination processes, especially in greenhouse agriculture where such technology is notably deficient, underscoring the pressing necessity for technological advancement. The proposed device will utilize advanced machine learning techniques via sophisticated image recognition and processing abilities to ensure precise flower identification and localization. It will also integrate RCC pollination methodologies to guarantee precision and efficacy. The technology utilizes advanced path planning algorithms to autonomously optimize pollination paths according to field or greenhouse layouts, thereby reducing redundant tasks and enhancing efficiency. Motion control technology ensures consistent performance in intricate surroundings, sustaining precise operations in both flat farms and structurally complex greenhouses.</p>
<p>To address these issues, this study leveraged the growth characteristics of greenhouse pepper plants. As illustrated in <xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10</bold></xref>, SolidWorks was employed to create a model of a pepper plant (<xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10a</bold></xref>) and a model of a cultivation scenario (<xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10b</bold></xref>). Furthermore, as shown in <xref ref-type="fig" rid="f11"><bold>Figure&#xa0;11</bold></xref>, the incorporation of a spray-based pollination end-effector with a six-axis robotic arm facilitates the accurate pollination of pepper plants. This innovative method, integrating modern spray technology with precision robotic arms, aims to improve pollination efficiency and uniformity while decreasing labor intensity and economic costs, providing a novel solution for pepper pollination in greenhouse agriculture.</p>
<fig id="f10" position="float">
<label>Figure&#xa0;10</label>
<caption>
<p>Facility pepper plant and planting scenario model. <bold>(a)</bold> Solidworks model of a chili plant. <bold>(b)</bold> Model of chili plant cultivation scenarios.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g010.tif">
<alt-text content-type="machine-generated">SolidWorks digital illustration shows a single chili plant, then the same plant model placed in a pot. Adjacent is an array of potted chili plant models arranged in evenly spaced rows on a brown rectangular surface.</alt-text>
</graphic></fig>
<fig id="f11" position="float">
<label>Figure&#xa0;11</label>
<caption>
<p>Application scenarios for pepper pollination using facilities. <bold>(a)</bold> Pollination scenarios. <bold>(b)</bold> Pollination and local illustrations.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-17-1778541-g011.tif">
<alt-text content-type="machine-generated">Two side-by-side digital illustrations show a robotic arm mounted on a wheeled platform facing a potted flowering plant. The left panel depicts the robot in an idle position near the plant. The right panel demonstrates the robot extending its arm to pollinate the plant, illustrated by a close-up inset showing pollen dispersal onto a flower.</alt-text>
</graphic></fig>
</sec>
</sec>
<sec id="s6">
<label>6</label>
<title>Summary and outlook</title>
<p>Looking ahead, there is compelling evidence to anticipate that mechanized precision pollination and seed production technologies will achieve widespread implementation across a broader spectrum of crop species. The synergistic integration of deep learning with pose estimation algorithms presents immense potential and value for the detection of crop flowers. By leveraging the robust feature extraction capabilities of deep learning alongside the precise spatial and directional predictions provided by pose estimation, it is possible to significantly elevate the levels of automation and intelligence in agriculture. This is particularly relevant for critical operations such as flower pollination and fruit harvesting. Such breakthroughs not only contribute to safeguarding global vegetable production and fostering sustainable agricultural development but also serve as pivotal technologies propelling the modernization of the agricultural sector.</p>
<p>The evolution of crop pollination apparatus and methodologies is progressively moving toward intelligence and precision to mitigate the challenges posed by declining natural pollinator populations. Future advancements in this domain will integrate artificial intelligence, machine learning, and robotics to drive innovative transformations within the industry. The development and deployment of these technologies are poised to exert a profound influence on the sustainable growth of global agriculture.</p>
<p>To augment the coordination efficiency within the workflow encompassing perception, decision-making, and execution for pollination robots, researchers must focus on two critical areas. First, it is essential to conduct rigorous analyses of the distribution characteristics of target objects within agricultural facilities. Incorporating these attributes as supplementary constraints into algorithms for visual identification, path planning, and motion control will facilitate the creation of specialized algorithms tailored to pollination environments. This approach optimizes the coordination mechanisms between the robot&#x2019;s perception, decision-making, and execution modules. Second, a comprehensive performance evaluation framework must be established to assess the continuous operational state of robotic arms in orchard or greenhouse settings. This system should not only evaluate individual modules, such as visual systems, path planning, and navigation, but also formulate comprehensive metrics that assess the entire perception, decision, and execution workflow. These metrics will quantify the synergistic effects between modules and the overall operational efficacy of the robot, thereby meeting future requirements for extensive system testing. These measures will assist researchers in developing autonomous robotic systems that exhibit superior performance, reduced costs, and greater applicability for agricultural practitioners.</p>
<p>Addressing the aforementioned technical challenges is paramount for the advancement of research on precision pollination technologies for greenhouse peppers. Resolving these issues is of critical importance for promoting the practical application of pepper flower detection, pose estimation, and end-effector technologies, ultimately elevating the intelligence level of facility-based pepper pollination.</p>
</sec>
</body>
<back>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>MK: Conceptualization, Funding acquisition, Writing &#x2013; original draft, Data curation. XJL: Investigation, Writing &#x2013; review &amp; editing, Visualization, Formal analysis. FX: Project administration, Conceptualization, Supervision, Writing &#x2013; review &amp; editing. XuZ: Writing &#x2013; review &amp; editing, Investigation, Validation, Formal analysis. YX: Writing &#x2013; review &amp; editing, Resources, Funding acquisition. YZ: Funding acquisition, Supervision, Methodology, Writing &#x2013; review &amp; editing, Resources. DL: Writing &#x2013; review &amp; editing, Supervision, Validation. XiZ: Writing &#x2013; review &amp; editing, Resources, Funding acquisition, Supervision. XL: Project administration, Writing &#x2013; review &amp; editing.</p></sec>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p></sec>
<sec id="s10" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p></sec>
<sec id="s11" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p></sec>
<ref-list>
<title>References</title>
<ref id="B85">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ahmad</surname> <given-names>K.</given-names></name>
<name><surname>Park</surname> <given-names>J. E.</given-names></name>
<name><surname>Ilyas</surname> <given-names>T.</given-names></name>
<name><surname>Lee</surname> <given-names>J. H.</given-names></name>
<name><surname>Lee</surname> <given-names>J. H.</given-names></name>
<name><surname>Kim</surname> <given-names>S.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Accurate and robust pollinations for watermelons using intelligence guided visual servoing</article-title>. <source>Comput. Electron. Agric.</source> <volume>219</volume>, <fpage>108753</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.108753</pub-id>
</mixed-citation>
</ref>
<ref id="B99">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Akdo&#x11f;an</surname> <given-names>C.</given-names></name>
<name><surname>&#xd6;zer</surname> <given-names>T.</given-names></name>
<name><surname>O&#x11f;uz</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>PP-YOLO: Deep learning based detection model to detect apple and cherry trees in orchard based on Histogram and Wavelet preprocessing techniques</article-title>. <source>Comput. Electron. Agric.</source> <volume>232</volume>, <fpage>110052</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2025.110052</pub-id>
</mixed-citation>
</ref>
<ref id="B75">
<mixed-citation publication-type="web">
<person-group person-group-type="author"><collab>Anon</collab>
</person-group> (<year>2022</year>). <source>Researchers develop new robot pollinator to assist in greenhouse</source>. Available online at: <uri xlink:href="https://www.hortidaily.com/article/9368317/researchers-develop-new-robotpollinator-to-assist-in-greenhouse/">https://www.hortidaily.com/article/9368317/researchers-develop-new-robotpollinator-to-assist-in-greenhouse/</uri> (Accessed <date-in-citation content-type="access-date">May 25, 2022</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B77">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Baur</surname> <given-names>J.</given-names></name>
<name><surname>Pfaff</surname> <given-names>J.</given-names></name>
<name><surname>Ulbrich</surname> <given-names>H.</given-names></name>
<name><surname>Villgrattner</surname> <given-names>T.</given-names></name>
</person-group> (<year>2012</year>). &#x201c;
<article-title>Design and development of a redundant modular multipurpose agricultural manipulator</article-title>,&#x201d; in <conf-name>2012 IEEE/ASME International Conference on Advanced Intelligent Mechatronics (AIM)</conf-name>. (<publisher-loc>Wollongong, Australia</publisher-loc>: 
<publisher-name>IEEE</publisher-name>), <fpage>823</fpage>&#x2013;<lpage>830</lpage>.
</mixed-citation>
</ref>
<ref id="B55">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chang</surname> <given-names>R.</given-names></name>
<name><surname>Qu</surname> <given-names>X.</given-names></name>
<name><surname>Shi</surname> <given-names>F.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Targeted deposition of peach pollen particles based on aerosol particle size regulation</article-title>. <source>J. Northwest A&amp;F Univ. (Nat. Sci. Ed.)</source> <volume>51</volume>, <fpage>130</fpage>&#x2013;<lpage>138</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.13207/j.cnki.jnwafu.2023.01.015</pub-id>
</mixed-citation>
</ref>
<ref id="B58">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chechetka</surname> <given-names>S. A.</given-names></name>
<name><surname>Yu</surname> <given-names>Y.</given-names></name>
<name><surname>Tange</surname> <given-names>M.</given-names></name>
<name><surname>Miyako</surname> <given-names>E.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>Materially engineered artificial pollinators</article-title>. <source>Chem</source> <volume>2</volume>, <fpage>224</fpage>&#x2013;<lpage>239</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.chempr.2017.01.008</pub-id>
</mixed-citation>
</ref>
<ref id="B14">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chen</surname> <given-names>C.</given-names></name>
<name><surname>Meng</surname> <given-names>Q.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Image processing-based fruit recognition in greenhouse tomatoes</article-title>. <source>Agric. Mech. Res.</source> <volume>47</volume>, <fpage>189</fpage>&#x2013;<lpage>193</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.13427/j.issn.1003-188X.2025.01.030</pub-id>
</mixed-citation>
</ref>
<ref id="B102">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chen</surname> <given-names>D.</given-names></name>
<name><surname>Huang</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Integrating reinforcement learning and large language models for crop production process management optimization and control through a new knowledge-based deep learning paradigm</article-title>. <source>Comput. Electron. Agric.</source> <volume>232</volume>, <fpage>110028</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2025.110028</pub-id>
</mixed-citation>
</ref>
<ref id="B45">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chen</surname> <given-names>G.</given-names></name>
<name><surname>Chen</surname> <given-names>Z.</given-names></name>
<name><surname>Wang</surname> <given-names>Y.</given-names></name>
<name><surname>Wang</surname> <given-names>J.</given-names></name>
<name><surname>Fan</surname> <given-names>G.</given-names></name>
<name><surname>Li</surname> <given-names>H.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Research on apple flower detection method based on data-enhanced deep learning</article-title>. <source>J. Chin. Agric. Mech.</source> <volume>43</volume>, <fpage>148</fpage>&#x2013;<lpage>155</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.13733/j.jcam.issn.2095-5553.2022.05.022</pub-id>
</mixed-citation>
</ref>
<ref id="B98">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chu</surname> <given-names>M.</given-names></name>
<name><surname>Si</surname> <given-names>Y.</given-names></name>
<name><surname>Li</surname> <given-names>Q.</given-names></name>
<name><surname>Liu</surname> <given-names>X.</given-names></name>
<name><surname>Liu</surname> <given-names>G.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Deep learning-based model to classify mastitis in Holstein dairy cows</article-title>. <source>Biosyst. Eng.</source> <volume>252</volume>, <fpage>92</fpage>&#x2013;<lpage>104</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2025.02.013</pub-id>
</mixed-citation>
</ref>
<ref id="B78">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Colucci</surname> <given-names>G.</given-names></name>
<name><surname>Tagliavini</surname> <given-names>L.</given-names></name>
<name><surname>Botta</surname> <given-names>A.</given-names></name>
<name><surname>Baglieri</surname> <given-names>L.</given-names></name>
<name><surname>Quaglia</surname> <given-names>G.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Decoupled motion planning of a mobile manipulator for precision agriculture</article-title>. <source>Robotica</source> <volume>41</volume>, <fpage>1872</fpage>&#x2013;<lpage>1887</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1017/S0263574723000243</pub-id>, PMID: <pub-id pub-id-type="pmid">41694064</pub-id>
</mixed-citation>
</ref>
<ref id="B20">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Cui</surname> <given-names>M.</given-names></name>
<name><surname>Chen</surname> <given-names>S.</given-names></name>
<name><surname>Li</surname> <given-names>M.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Research on strawberry flower recognition algorithm based on image processing</article-title>. <source>Digit. Technol. Appl.</source> <volume>37</volume>, <fpage>109</fpage>&#x2013;<lpage>111</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.19695/j.cnki.cn12-1369.2019.12.64</pub-id>
</mixed-citation>
</ref>
<ref id="B37">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Deng</surname> <given-names>Y.</given-names></name>
<name><surname>Wu</surname> <given-names>H.</given-names></name>
<name><surname>Zhu</surname> <given-names>H.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Citrus flower recognition and flower quantity statistics based on instance segmentation</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>36</volume>, <fpage>200</fpage>&#x2013;<lpage>207</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11975/j.issn.1002-6819.2020.24.024</pub-id>
</mixed-citation>
</ref>
<ref id="B67">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ding</surname> <given-names>S.</given-names></name>
<name><surname>Xue</surname> <given-names>X.</given-names></name>
<name><surname>Cai</surname> <given-names>C.</given-names></name>
<name><surname>Qin</surname> <given-names>W.</given-names></name>
<name><surname>Fang</surname> <given-names>J.</given-names></name>
<name><surname>Sun</surname> <given-names>Z.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>Design and experiment on handheld air-assisted pollination device</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>31</volume>, <fpage>68</fpage>&#x2013;<lpage>75</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3969/j.issn.1002-6819.2014.z1.010</pub-id>
</mixed-citation>
</ref>
<ref id="B18">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dorj</surname> <given-names>U. O.</given-names></name>
<name><surname>Lee</surname> <given-names>M.</given-names></name>
<name><surname>Lee</surname> <given-names>K. K.</given-names></name>
<name><surname>Jeong</surname> <given-names>G.</given-names></name>
</person-group> (<year>2013</year>). 
<article-title>A novel technique for tangerine yield prediction using flower detection algorithm</article-title>. <source>Int. J. Pattern Recognit. Artif. Intell.</source> <volume>27</volume>, <fpage>1354007</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1142/S0218001413540074</pub-id>, PMID: <pub-id pub-id-type="pmid">40951326</pub-id>
</mixed-citation>
</ref>
<ref id="B71">
<mixed-citation publication-type="web">
<person-group person-group-type="author"><collab>Dropcopter</collab>
</person-group> (<year>2022</year>). Available online at: <uri xlink:href="https://www.dropcopter.com/">https://www.dropcopter.com/</uri> (Accessed <date-in-citation content-type="access-date">September 2, 2022</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B47">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Estrada</surname> <given-names>J. S.</given-names></name>
<name><surname>Vasconez</surname> <given-names>J. P.</given-names></name>
<name><surname>Fu</surname> <given-names>L.</given-names></name>
<name><surname>Cheein</surname> <given-names>F. A.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Deep Learning based flower detection and counting in highly populated images: A peach grove case study</article-title>. <source>J. Agric. Food Res.</source> <volume>15</volume>, <fpage>100930</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jafr.2023.100930</pub-id>
</mixed-citation>
</ref>
<ref id="B30">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Eyles</surname> <given-names>A.</given-names></name>
<name><surname>Close</surname> <given-names>D. C.</given-names></name>
<name><surname>Quarrell</surname> <given-names>S. R.</given-names></name>
<name><surname>Allen</surname> <given-names>G. R.</given-names></name>
<name><surname>Spurr</surname> <given-names>C. J.</given-names></name>
<name><surname>Barry</surname> <given-names>K. M.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>Feasibility of mechanical pollination in tree fruit and nut crops: A review</article-title>. <source>Agronomy</source> <volume>12</volume>, <fpage>1113</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy12051113</pub-id>, PMID: <pub-id pub-id-type="pmid">41725453</pub-id>
</mixed-citation>
</ref>
<ref id="B38">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Fan</surname> <given-names>T.</given-names></name>
<name><surname>Gu</surname> <given-names>J.</given-names></name>
<name><surname>Wang</surname> <given-names>W.</given-names></name>
<name><surname>Li</surname> <given-names>C.</given-names></name>
<name><surname>Wang</surname> <given-names>W.</given-names></name>
<name><surname>Zuo</surname> <given-names>Y.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>A lightweight honeysuckle recognition method based on improved YOLOv5s</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>39</volume>, <fpage>192</fpage>&#x2013;<lpage>200</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11975/j.issn.1002-6819.2023.01.018</pub-id>
</mixed-citation>
</ref>
<ref id="B17">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Feng</surname> <given-names>Q.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
<name><surname>Jiang</surname> <given-names>K.</given-names></name>
<name><surname>Zhou</surname> <given-names>J.</given-names></name>
<name><surname>Zhang</surname> <given-names>R.</given-names></name>
<name><surname>Ma</surname> <given-names>W.</given-names></name>
</person-group> (<year>2013</year>). 
<article-title>Design and testing of key components for an automatic flower seedling transplanter</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>29</volume>, <fpage>21</fpage>&#x2013;<lpage>27</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3969/j.issn.1002-6819.2013.21.003</pub-id>
</mixed-citation>
</ref>
<ref id="B92">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ferreira</surname> <given-names>A. I. S.</given-names></name>
<name><surname>Da Silva</surname> <given-names>N. F. F.</given-names></name>
<name><surname>Mesquita</surname> <given-names>F. N.</given-names></name>
<name><surname>Rosa</surname> <given-names>T. C.</given-names></name>
<name><surname>Monz&#xf3;n</surname> <given-names>V. H.</given-names></name>
<name><surname>Mesquita-Neto</surname> <given-names>J. N.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Automatic acoustic recognition of pollinating bee species can be highly improved by Deep Learning models accompanied by pre-training and strong data augmentation</article-title>. <source>Front. Plant Sci.</source> <volume>14</volume>, <elocation-id>1081050</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2023.1081050</pub-id>, PMID: <pub-id pub-id-type="pmid">37123860</pub-id>
</mixed-citation>
</ref>
<ref id="B66">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gao</surname> <given-names>C.</given-names></name>
<name><surname>He</surname> <given-names>L.</given-names></name>
<name><surname>Fang</surname> <given-names>W.</given-names></name>
<name><surname>Wu</surname> <given-names>Z.</given-names></name>
<name><surname>Jiang</surname> <given-names>H.</given-names></name>
<name><surname>Li</surname> <given-names>R.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>A novel pollination robot for kiwifruit flower based on preferential flowers selection and precisely target</article-title>. <source>Comput. Electron. Agric.</source> <volume>207</volume>, <fpage>107762</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.107762</pub-id>
</mixed-citation>
</ref>
<ref id="B31">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ge</surname> <given-names>Y.</given-names></name>
<name><surname>Xiong</surname> <given-names>Y.</given-names></name>
<name><surname>Tenorio</surname> <given-names>G. L.</given-names></name>
<name><surname>From</surname> <given-names>P. J.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Fruit localization and environment perception for strawberry harvesting robots</article-title>. <source>IEEE Access</source> <volume>7</volume>, <fpage>147642</fpage>&#x2013;<lpage>147652</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2019.2946369</pub-id>
</mixed-citation>
</ref>
<ref id="B34">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gen&#xe9;-Mola</surname> <given-names>J.</given-names></name>
<name><surname>Sanz-Cortiella</surname> <given-names>R.</given-names></name>
<name><surname>Rosell-Polo</surname> <given-names>J. R.</given-names></name>
<name><surname>Morros</surname> <given-names>J. R.</given-names></name>
<name><surname>Ruiz-Hidalgo</surname> <given-names>J.</given-names></name>
<name><surname>Vilaplana</surname> <given-names>V.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>Fruit detection and 3D location using instance segmentation neural networks and structure-from-motion photogrammetry</article-title>. <source>Comput. Electron. Agric.</source> <volume>169</volume>, <fpage>105165</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2019.105165</pub-id>
</mixed-citation>
</ref>
<ref id="B40">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gong</surname> <given-names>W.</given-names></name>
<name><surname>Yang</surname> <given-names>Z.</given-names></name>
<name><surname>Li</surname> <given-names>K.</given-names></name>
<name><surname>Hao</surname> <given-names>W.</given-names></name>
<name><surname>He</surname> <given-names>Z.</given-names></name>
<name><surname>Ding</surname> <given-names>X.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>A method for detecting peach blossoms in natural environments based on improved YOLOv5s</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>39</volume>, <fpage>177</fpage>&#x2013;<lpage>185</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11975/j.issn.1002-6819.2023.01.019</pub-id>
</mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gu</surname> <given-names>J.</given-names></name>
<name><surname>Wang</surname> <given-names>Z.</given-names></name>
<name><surname>Kuen</surname> <given-names>J.</given-names></name>
<name><surname>Ma</surname> <given-names>L.</given-names></name>
<name><surname>Shahroudy</surname> <given-names>A.</given-names></name>
<name><surname>Shuai</surname> <given-names>B.</given-names></name>
<etal/>
</person-group>. (<year>2018</year>). 
<article-title>Recent advances in convolutional neural networks</article-title>. <source>Pattern Recognit.</source> <volume>77</volume>, <fpage>354</fpage>&#x2013;<lpage>377</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.patcog.2017.10.013</pub-id>
</mixed-citation>
</ref>
<ref id="B33">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hao</surname> <given-names>W.</given-names></name>
<name><surname>Ding</surname> <given-names>X.</given-names></name>
<name><surname>He</surname> <given-names>Z.</given-names></name>
<name><surname>Li</surname> <given-names>K.</given-names></name>
<name><surname>Gong</surname> <given-names>W.</given-names></name>
<name><surname>Li</surname> <given-names>Z.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Development and evaluation of precision liquid pollinator for kiwifruit</article-title>. <source>Comput. Electron. Agric.</source> <volume>213</volume>, <fpage>108193</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.108193</pub-id>
</mixed-citation>
</ref>
<ref id="B95">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hao</surname> <given-names>W.</given-names></name>
<name><surname>Zhang</surname> <given-names>J.</given-names></name>
<name><surname>Xu</surname> <given-names>W.</given-names></name>
<name><surname>Fan</surname> <given-names>X.</given-names></name>
<name><surname>Norton</surname> <given-names>T.</given-names></name>
<name><surname>Cui</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2026</year>). 
<article-title>Clip-assisted flower detection and wind-compensated precision liquid pollination robot for kiwifruit orchards</article-title>. <source>Comput. Electron. Agric.</source> <volume>241</volume>, <fpage>111250</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2025.111250</pub-id>
</mixed-citation>
</ref>
<ref id="B24">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Huang</surname> <given-names>Y.</given-names></name>
<name><surname>Qian</surname> <given-names>Y.</given-names></name>
<name><surname>Wei</surname> <given-names>H.</given-names></name>
<name><surname>Lu</surname> <given-names>Y.</given-names></name>
<name><surname>Ling</surname> <given-names>B.</given-names></name>
<name><surname>Qin</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>A survey of deep learning-based object detection methods in crop counting</article-title>. <source>Comput. Electron. Agric.</source> <volume>215</volume>, <fpage>108425</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.108425</pub-id>
</mixed-citation>
</ref>
<ref id="B103">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Huang</surname> <given-names>J.</given-names></name>
<name><surname>Yi</surname> <given-names>F.</given-names></name>
<name><surname>Cui</surname> <given-names>Y.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
<name><surname>Jin</surname> <given-names>C.</given-names></name>
<name><surname>Cheein</surname> <given-names>F. A.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Design and implementation of a seed potato cutting robot using deep learning and delta robotic system with accuracy and speed for automated processing of agricultural products</article-title>. <source>Comput. Electron. Agric.</source> <volume>237</volume>, <fpage>110716</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2025.110716</pub-id>
</mixed-citation>
</ref>
<ref id="B32">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ishita</surname> <given-names>M.</given-names></name>
<name><surname>Geetika</surname> <given-names>S.</given-names></name>
<name><surname>Yogita</surname> <given-names>G.</given-names></name>
<name><surname>Anuradha</surname> <given-names>D.</given-names></name>
<name><surname>Rastogi</surname> <given-names>P.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Robotic path planning using flower pollination algorithm</article-title>. <source>Recent Adv. Comput. Sci. Commun.</source> <volume>13</volume>, <fpage>191</fpage>&#x2013;<lpage>199</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2174/2213272413666200212104040</pub-id>, PMID: <pub-id pub-id-type="pmid">41727521</pub-id>
</mixed-citation>
</ref>
<ref id="B101">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jiang</surname> <given-names>A.</given-names></name>
<name><surname>Ahamed</surname> <given-names>T.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Development of an autonomous navigation system for orchard spraying robots integrating a thermal camera and LiDAR using a deep learning algorithm under low-and no-light conditions</article-title>. <source>Comput. Electron. Agric.</source> <volume>235</volume>, <fpage>110359</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2025.110359</pub-id>
</mixed-citation>
</ref>
<ref id="B64">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jiyu</surname> <given-names>L.</given-names></name>
<name><surname>Lan</surname> <given-names>Y.</given-names></name>
<name><surname>Jianwei</surname> <given-names>W.</given-names></name>
<name><surname>Shengde</surname> <given-names>C.</given-names></name>
<name><surname>Cong</surname> <given-names>H.</given-names></name>
<name><surname>Qi</surname> <given-names>L.</given-names></name>
<etal/>
</person-group>. (<year>2017</year>). 
<article-title>Distribution law of rice pollen in the wind field of small UAV</article-title>. <source>Int. J. Agric. Biol. Eng.</source> <volume>10</volume>, <fpage>32</fpage>&#x2013;<lpage>40</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.25165/j.ijabe.20171004.3103</pub-id>
</mixed-citation>
</ref>
<ref id="B25">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kamata</surname> <given-names>T.</given-names></name>
<name><surname>Roshanianfard</surname> <given-names>A.</given-names></name>
<name><surname>Noguchi</surname> <given-names>N.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Heavy-weight crop harvesting robot-controlling algorithm</article-title>. <source>IFAC-PapersOnLine</source> <volume>51</volume>, <fpage>244</fpage>&#x2013;<lpage>249</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ifacol.2018.08.165</pub-id>
</mixed-citation>
</ref>
<ref id="B69">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kempe</surname> <given-names>K.</given-names></name>
<name><surname>Gils</surname> <given-names>M.</given-names></name>
</person-group> (<year>2011</year>). 
<article-title>Pollination control technologies for hybrid breeding</article-title>. <source>Mol. Breed.</source> <volume>27</volume>, <fpage>417</fpage>&#x2013;<lpage>437</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11032-011-9555-0</pub-id>, PMID: <pub-id pub-id-type="pmid">41732346</pub-id>
</mixed-citation>
</ref>
<ref id="B502">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Khubaib</surname> <given-names>M. S.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). 
<article-title>A vision-guided servo control for an automated pollination robot</article-title>. <source>IEEE Access</source> <volume>9</volume>, <fpage>13425</fpage>&#x2013;<lpage>13437</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2021.3051457</pub-id>
</mixed-citation>
</ref>
<ref id="B22">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Krizhevsky</surname> <given-names>A.</given-names></name>
<name><surname>Sutskever</surname> <given-names>I.</given-names></name>
<name><surname>Hinton</surname> <given-names>G. E.</given-names></name>
</person-group> (<year>2012</year>). 
<article-title>ImageNet classification with deep convolutional neural networks</article-title>. <source>Adv. Neural Inf. Process. Syst.</source> <volume>25</volume>, <fpage>1097</fpage>&#x2013;<lpage>1105</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1145/3065386</pub-id>, PMID: <pub-id pub-id-type="pmid">40727313</pub-id>
</mixed-citation>
</ref>
<ref id="B16">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kuang</surname> <given-names>M.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
<name><surname>Chen</surname> <given-names>N.</given-names></name>
<name><surname>Liu</surname> <given-names>D.</given-names></name>
<name><surname>Xiang</surname> <given-names>Y.</given-names></name>
<name><surname>Liu</surname> <given-names>F.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>a). 
<article-title>A lightweight method for pepper flower object detection based on improved YOLOv8n</article-title>. <source>Trans. Chin. Soc Agric. Mach.</source> <volume>41</volume>, <fpage>198</fpage>&#x2013;<lpage>207</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.6041/j.issn.1000-1298.2024.12.025</pub-id>
</mixed-citation>
</ref>
<ref id="B54">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kuang</surname> <given-names>M.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
<name><surname>Chen</surname> <given-names>S.</given-names></name>
<name><surname>Liu</surname> <given-names>D.</given-names></name>
<name><surname>Xiang</surname> <given-names>Y.</given-names></name>
<name><surname>Liu</surname> <given-names>F.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>c). 
<article-title>A lightweight target detection method for pepper flowers based on improved YOLOv8n</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>41</volume>, <fpage>198</fpage>&#x2013;<lpage>207</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.6041/j.issn.1000-1298.2024.12.025</pub-id>
</mixed-citation>
</ref>
<ref id="B53">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kuang</surname> <given-names>M.</given-names></name>
<name><surname>Xie</surname> <given-names>F.</given-names></name>
<name><surname>Liu</surname> <given-names>D.</given-names></name>
<name><surname>Wu</surname> <given-names>B.</given-names></name>
<name><surname>Chen</surname> <given-names>S.</given-names></name>
<name><surname>Xiang</surname> <given-names>Y.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>b). 
<article-title>A refined YOLOv5n-based method for detecting pepper flower objects integrating transfer learning</article-title>. <source>Appl. Soft Comput.</source> <volume>171</volume>, <fpage>114400</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.asoc.2025.114400</pub-id>
</mixed-citation>
</ref>
<ref id="B501">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>J.</given-names></name>
<name><surname>Lan</surname> <given-names>Y.</given-names></name>
<name><surname>Wang</surname> <given-names>J.</given-names></name>
<name><surname>Chen</surname> <given-names>S.</given-names></name>
<name><surname>Huang</surname> <given-names>C.</given-names></name>
<name><surname>Liu</surname> <given-names>Q.</given-names></name>
<etal/>
</person-group>. (<year>2017</year>). 
<article-title>Distribution law of rice pollen in the wind field of small UAV</article-title>. <source>Int. J. Agric. Biol. Eng.</source> <volume>10</volume>, <fpage>32</fpage>&#x2013;<lpage>40</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.25165/j.ijabe.20171004.3103</pub-id>
</mixed-citation>
</ref>
<ref id="B93">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>X.</given-names></name>
<name><surname>Du</surname> <given-names>J.</given-names></name>
<name><surname>Chen</surname> <given-names>X.</given-names></name>
<name><surname>Shi</surname> <given-names>F.</given-names></name>
<name><surname>Li</surname> <given-names>S.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Deep learning-based kiwifruit flower recognition method to facilitate automated pollination</article-title>. <source>Appl. Soft Comput.</source> <volume>113</volume>, <fpage>113855</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.asoc.2025.113855</pub-id>
</mixed-citation>
</ref>
<ref id="B83">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>K.</given-names></name>
<name><surname>Huo</surname> <given-names>Y.</given-names></name>
<name><surname>Liu</surname> <given-names>Y.</given-names></name>
<name><surname>Shi</surname> <given-names>Y.</given-names></name>
<name><surname>He</surname> <given-names>Z.</given-names></name>
<name><surname>Cui</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2022</year>a). 
<article-title>Design of a lightweight robotic arm for kiwifruit pollination</article-title>. <source>Comput. Electron. Agric.</source> <volume>198</volume>, <fpage>107114</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2022.107114</pub-id>
</mixed-citation>
</ref>
<ref id="B81">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>G.</given-names></name>
<name><surname>Suo</surname> <given-names>R.</given-names></name>
<name><surname>Zhao</surname> <given-names>G.</given-names></name>
<name><surname>Gao</surname> <given-names>C.</given-names></name>
<name><surname>Fu</surname> <given-names>L.</given-names></name>
<name><surname>Shi</surname> <given-names>F.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>Real-time detection of kiwifruit flower and bud simultaneously in orchard using YOLOv4 for robotic pollination</article-title>. <source>Comput. Electron. Agric.</source> <volume>193</volume>, <fpage>106641</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2021.106641</pub-id>
</mixed-citation>
</ref>
<ref id="B82">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>K.</given-names></name>
<name><surname>Zhai</surname> <given-names>L.</given-names></name>
<name><surname>Pan</surname> <given-names>H.</given-names></name>
<name><surname>Shi</surname> <given-names>Y.</given-names></name>
<name><surname>Ding</surname> <given-names>X.</given-names></name>
<name><surname>Cui</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2022</year>b). 
<article-title>Identification of the operating position and orientation of a robotic kiwifruit pollinator</article-title>. <source>Biosyst. Eng.</source> <volume>222</volume>, <fpage>29</fpage>&#x2013;<lpage>44</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2022.07.014</pub-id>
</mixed-citation>
</ref>
<ref id="B63">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>J.</given-names></name>
<name><surname>Zhou</surname> <given-names>Z.</given-names></name>
<name><surname>Hu</surname> <given-names>L.</given-names></name>
<name><surname>Zang</surname> <given-names>Y.</given-names></name>
<name><surname>Xu</surname> <given-names>S.</given-names></name>
<name><surname>Liu</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2014</year>). 
<article-title>Optimization of operation parameters for supplementary pollination in hybrid rice breeding using round multi-axis multi-rotor electric unmanned helicopter</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>30</volume>, <fpage>1</fpage>&#x2013;<lpage>9</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3969/j.issn.1002-6819.2014.10.001</pub-id>
</mixed-citation>
</ref>
<ref id="B72">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>M.</given-names></name>
<name><surname>Zhu</surname> <given-names>X.</given-names></name>
<name><surname>Li</surname> <given-names>Q.</given-names></name>
<name><surname>Jin</surname> <given-names>X.</given-names></name>
<name><surname>Xie</surname> <given-names>X.</given-names></name>
<name><surname>Ma</surname> <given-names>H.</given-names></name>
<etal/>
</person-group>. (<year>2026</year>). 
<article-title>Design and performance verification of an intelligent pollination robot for facility tomatoes</article-title>. <source>Comput. Electron. Agric.</source> <volume>240</volume>, <fpage>111139</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2025.111139</pub-id>
</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lin</surname> <given-names>Q.</given-names></name>
<name><surname>Xin</surname> <given-names>Z.</given-names></name>
<name><surname>Kong</surname> <given-names>L.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
<name><surname>Yang</surname> <given-names>X.</given-names></name>
<name><surname>He</surname> <given-names>W.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Current status and breeding countermeasures of pepper industry development in China</article-title>. <source>J. China Agric. Univ.</source> <volume>28</volume>, <fpage>82</fpage>&#x2013;<lpage>95</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11841/j.issn.1007-4333.2023.01.11</pub-id>
</mixed-citation>
</ref>
<ref id="B10">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lin</surname> <given-names>Y.</given-names></name>
<name><surname>Yang</surname> <given-names>Y.</given-names></name>
<name><surname>Wang</surname> <given-names>C.</given-names></name>
<name><surname>Mao</surname> <given-names>L.</given-names></name>
<name><surname>Wang</surname> <given-names>W.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>Early spring facility watermelon flower abnormalities and countermeasures</article-title>. <source>China Veg.</source> <volume>4</volume>, <fpage>93</fpage>&#x2013;<lpage>94</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.19928/j.cnki.1000-6346.2017.04.023</pub-id>
</mixed-citation>
</ref>
<ref id="B89">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liu</surname> <given-names>L.</given-names></name>
<name><surname>Liu</surname> <given-names>Z.</given-names></name>
<name><surname>Han</surname> <given-names>H.</given-names></name>
<name><surname>Jiang</surname> <given-names>Y.</given-names></name>
<name><surname>He</surname> <given-names>X.</given-names></name>
<name><surname>Liu</surname> <given-names>Y.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Influence of different liquid spray pollination parameters on pollen activity of fruit trees&#x2014;pear liquid spray pollination as an example</article-title>. <source>Horticulturae</source> <volume>9</volume>, <fpage>350</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/horticulturae9030350</pub-id>
</mixed-citation>
</ref>
<ref id="B15">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liu</surname> <given-names>F.</given-names></name>
<name><surname>Liu</surname> <given-names>Y.</given-names></name>
<name><surname>Lin</surname> <given-names>S.</given-names></name>
<name><surname>Guo</surname> <given-names>W.</given-names></name>
<name><surname>Xu</surname> <given-names>F.</given-names></name>
<name><surname>Zhang</surname> <given-names>B.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>A fast recognition method for tomato fruits in complex environments based on improved YOLO</article-title>. <source>Trans. Chin. Soc Agric. Mach.</source> <volume>51</volume>, <fpage>229</fpage>&#x2013;<lpage>237</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.6041/j.issn.1000-1298.2020.11.026</pub-id>
</mixed-citation>
</ref>
<ref id="B96">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liu</surname> <given-names>S.</given-names></name>
<name><surname>Zhang</surname> <given-names>X.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
<name><surname>Hou</surname> <given-names>X.</given-names></name>
<name><surname>Chen</surname> <given-names>X.</given-names></name>
<name><surname>Xu</surname> <given-names>J.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Tomato flower pollination features recognition based on binocular gray value-deformation coupled template matching</article-title>. <source>Comput. Electron. Agric.</source> <volume>214</volume>, <fpage>108345</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.108345</pub-id>
</mixed-citation>
</ref>
<ref id="B13">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Long</surname> <given-names>J.</given-names></name>
<name><surname>Zhao</surname> <given-names>C.</given-names></name>
<name><surname>Lin</surname> <given-names>S.</given-names></name>
<name><surname>Guo</surname> <given-names>W.</given-names></name>
<name><surname>Wen</surname> <given-names>C.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Improved Mask R-CNN for fruit segmentation of tomatoes at different maturity stages in greenhouse environments</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>37</volume>, <fpage>100</fpage>&#x2013;<lpage>108</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11975/j.issn.1002-6819.2021.14.011</pub-id>
</mixed-citation>
</ref>
<ref id="B90">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ma</surname> <given-names>W. H.</given-names></name>
<name><surname>Wu</surname> <given-names>W. Q.</given-names></name>
<name><surname>Song</surname> <given-names>H. L.</given-names></name>
<name><surname>Lei</surname> <given-names>J.</given-names></name>
<name><surname>Li</surname> <given-names>L. X.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Effects of different pollination methods on tomato fruits&#x2019; quality and metabolism</article-title>. <source>Front. Plant Sci.</source> <volume>16</volume>, <elocation-id>1560186</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2025.1560186</pub-id>, PMID: <pub-id pub-id-type="pmid">40256596</pub-id>
</mixed-citation>
</ref>
<ref id="B44">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Mu</surname> <given-names>X.</given-names></name>
<name><surname>He</surname> <given-names>L.</given-names></name>
<name><surname>Heinemann</surname> <given-names>P.</given-names></name>
<name><surname>Schupp</surname> <given-names>J.</given-names></name>
<name><surname>Karkee</surname> <given-names>M.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Mask R-CNN based apple flower detection and king flower identification for precision pollination</article-title>. <source>Smart Agric. Technol.</source> <volume>4</volume>, <fpage>100151</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.atech.2022.100151</pub-id>
</mixed-citation>
</ref>
<ref id="B87">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ni</surname> <given-names>Z.</given-names></name>
<name><surname>Li</surname> <given-names>Q.</given-names></name>
<name><surname>Zhang</surname> <given-names>M.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Efficient motion planning for chili flower pollination mechanism based on BI-RRT</article-title>. <source>Comput. Electron. Agric.</source> <volume>232</volume>, <fpage>110063</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2025.110063</pub-id>
</mixed-citation>
</ref>
<ref id="B21">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Ohi</surname> <given-names>N.</given-names></name>
<name><surname>Lassak</surname> <given-names>K.</given-names></name>
<name><surname>Watson</surname> <given-names>R.</given-names></name>
<name><surname>Strader</surname> <given-names>J.</given-names></name>
<name><surname>Du</surname> <given-names>Y.</given-names></name>
<name><surname>Yang</surname> <given-names>C.</given-names></name>
<etal/>
</person-group>. (<year>2018</year>). &#x201c;
<article-title>Design of an autonomous precision pollination robot</article-title>,&#x201d; in <conf-name>2018 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</conf-name>. (<publisher-loc>Madrid, Spain</publisher-loc>: 
<publisher-name>IEEE</publisher-name>), <fpage>7711</fpage>&#x2013;<lpage>7718</lpage>.
</mixed-citation>
</ref>
<ref id="B91">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Oubounyt</surname> <given-names>M.</given-names></name>
<name><surname>Louadi</surname> <given-names>Z.</given-names></name>
<name><surname>Tayara</surname> <given-names>H.</given-names></name>
<name><surname>Chong</surname> <given-names>K. T.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>DeePromoter: Robust promoter predictor using deep learning</article-title>. <source>Front. Genet.</source> <volume>10</volume>, <fpage>1</fpage>&#x2013;<lpage>9</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fgene.2019.00286</pub-id>, PMID: <pub-id pub-id-type="pmid">31024615</pub-id>
</mixed-citation>
</ref>
<ref id="B23">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Paul</surname> <given-names>N.</given-names></name>
<name><surname>Sunil</surname> <given-names>G. C.</given-names></name>
<name><surname>Horvath</surname> <given-names>D.</given-names></name>
<name><surname>Sun</surname> <given-names>X.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Deep learning for plant stress detection: A comprehensive review of technologies, challenges, and future directions</article-title>. <source>Comput. Electron. Agric.</source> <volume>229</volume>, <fpage>109734</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.109734</pub-id>
</mixed-citation>
</ref>
<ref id="B28">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Peng</surname> <given-names>H.</given-names></name>
<name><surname>Chen</surname> <given-names>H.</given-names></name>
<name><surname>Zhang</surname> <given-names>X.</given-names></name>
<name><surname>Liu</surname> <given-names>H.</given-names></name>
<name><surname>Chen</surname> <given-names>K.</given-names></name>
<name><surname>Xiong</surname> <given-names>J.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Retinanet_G2S: A multi-scale feature fusion-based network for fruit detection of punna navel oranges in complex field environments</article-title>. <source>Precis. Agric.</source> <volume>25</volume>, <fpage>889</fpage>&#x2013;<lpage>913</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11119-023-10098-6</pub-id>
</mixed-citation>
</ref>
<ref id="B27">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Qiu</surname> <given-names>C.</given-names></name>
<name><surname>Tian</surname> <given-names>G.</given-names></name>
<name><surname>Zhao</surname> <given-names>J.</given-names></name>
<name><surname>Liu</surname> <given-names>Q.</given-names></name>
<name><surname>Xie</surname> <given-names>S.</given-names></name>
<name><surname>Zheng</surname> <given-names>K.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Grape maturity detection and visual pre-positioning based on improved YOLOv4</article-title>. <source>Electronics</source> <volume>11</volume>, <fpage>2677</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/electronics11172677</pub-id>
</mixed-citation>
</ref>
<ref id="B36">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Salim</surname> <given-names>F.</given-names></name>
<name><surname>Saeed</surname> <given-names>F.</given-names></name>
<name><surname>Basurra</surname> <given-names>S.</given-names></name>
<name><surname>Qasem</surname> <given-names>S. N.</given-names></name>
<name><surname>Al-Hadhrami</surname> <given-names>T.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>DenseNet-201 and Xception pre-trained deep learning models for fruit recognition</article-title>. <source>Electronics</source> <volume>12</volume>, <fpage>3132</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/electronics12143132</pub-id>
</mixed-citation>
</ref>
<ref id="B79">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Schuetz</surname> <given-names>C.</given-names></name>
<name><surname>Baur</surname> <given-names>J.</given-names></name>
<name><surname>Pfaff</surname> <given-names>J.</given-names></name>
<name><surname>Buschmann</surname> <given-names>T.</given-names></name>
<name><surname>Ulbrich</surname> <given-names>H.</given-names></name>
</person-group> (<year>2015</year>). &#x201c;
<article-title>Evaluation of a direct optimization method for trajectory planning of a 9-DOF redundant fruit-picking manipulator</article-title>,&#x201d; in <conf-name>2015 IEEE International Conference on Robotics and Automation (ICRA)</conf-name>. (<publisher-loc>Seattle, WA, USA</publisher-loc>: 
<publisher-name>IEEE</publisher-name>), <fpage>2660</fpage>&#x2013;<lpage>2666</lpage>.
</mixed-citation>
</ref>
<ref id="B42">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shang</surname> <given-names>Y.</given-names></name>
<name><surname>Geng</surname> <given-names>M.</given-names></name>
<name><surname>Fang</surname> <given-names>H.</given-names></name>
<name><surname>Cai</surname> <given-names>M.</given-names></name>
<name><surname>Wang</surname> <given-names>J.</given-names></name>
<name><surname>Song</surname> <given-names>H.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Using unmanned aerial vehicle acquired RGB images and Density-Cluster-Count model for tree-level apple flower quantification</article-title>. <source>Comput. Electron. Agric.</source> <volume>226</volume>, <fpage>109389</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.109389</pub-id>
</mixed-citation>
</ref>
<ref id="B43">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shang</surname> <given-names>Y.</given-names></name>
<name><surname>Zhang</surname> <given-names>Q.</given-names></name>
<name><surname>Song</surname> <given-names>H.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Application of YOLOv5s-based deep learning for apple flower detection in natural scenes</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>38</volume>, <fpage>222</fpage>&#x2013;<lpage>229</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11975/j.issn.1002-6819.2022.06.026</pub-id>
</mixed-citation>
</ref>
<ref id="B9">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shen</surname> <given-names>M.</given-names></name>
<name><surname>Hao</surname> <given-names>F.</given-names></name>
</person-group> (<year>2004</year>). 
<article-title>Effects of internal and external shading on light environment in connected plastic greenhouses</article-title>. <source>Trans. Chin. Soc Agric. Mach.</source> <volume>5</volume>, <fpage>110</fpage>&#x2013;<lpage>116</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3321/j.issn:1000-1298.2004.05.029</pub-id>
</mixed-citation>
</ref>
<ref id="B62">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shimizu</surname> <given-names>H.</given-names></name>
<name><surname>Hoshi</surname> <given-names>T.</given-names></name>
<name><surname>Nakamura</surname> <given-names>K.</given-names></name>
<name><surname>Park</surname> <given-names>J. E.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Development of a non-contact ultrasonic pollination device</article-title>. <source>Environ. Control Biol.</source> <volume>53</volume>, <fpage>85</fpage>&#x2013;<lpage>88</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2525/ecb.53.85</pub-id>
</mixed-citation>
</ref>
<ref id="B61">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shimizu</surname> <given-names>H.</given-names></name>
<name><surname>Sato</surname> <given-names>T.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Development of strawberry pollination system using ultrasonic radiation pressure</article-title>. <source>IFAC-PapersOnLine</source> <volume>51</volume>, <fpage>57</fpage>&#x2013;<lpage>60</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ifacol.2018.08.060</pub-id>
</mixed-citation>
</ref>
<ref id="B41">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Si</surname> <given-names>Y.</given-names></name>
<name><surname>Kong</surname> <given-names>D.</given-names></name>
<name><surname>Wang</surname> <given-names>K.</given-names></name>
<name><surname>Liu</surname> <given-names>L.</given-names></name>
<name><surname>Yang</surname> <given-names>X.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Recognition method of apple central and peripheral flowers based on CRV-YOLO</article-title>. <source>Trans. Chin. Soc Agric. Mach.</source> <volume>55</volume>, <fpage>278</fpage>&#x2013;<lpage>286</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.6041/j.issn.1000-1298.2024.02.025</pub-id>
</mixed-citation>
</ref>
<ref id="B84">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Strader</surname> <given-names>J.</given-names></name>
<name><surname>Nguyen</surname> <given-names>J.</given-names></name>
<name><surname>Tatsch</surname> <given-names>C.</given-names></name>
<name><surname>Du</surname> <given-names>Y.</given-names></name>
<name><surname>Lassak</surname> <given-names>K.</given-names></name>
<name><surname>Buzzo</surname> <given-names>B.</given-names></name>
<etal/>
</person-group>. (<year>2019</year>). &#x201c;
<article-title>Flower interaction subsystem for a precision pollination robot</article-title>,&#x201d; in <conf-name>2019 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</conf-name>. (<publisher-loc>Macau, China</publisher-loc>: 
<publisher-name>IEEE</publisher-name>), <fpage>5534</fpage>&#x2013;<lpage>5541</lpage>.
</mixed-citation>
</ref>
<ref id="B68">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Suming</surname> <given-names>D.</given-names></name>
<name><surname>Xinyu</surname> <given-names>X.</given-names></name>
<name><surname>Jinbao</surname> <given-names>F.</given-names></name>
<name><surname>Zhu</surname> <given-names>S.</given-names></name>
<name><surname>Chen</surname> <given-names>C.</given-names></name>
<name><surname>Liangfu</surname> <given-names>Z.</given-names></name>
<etal/>
</person-group>. (<year>2015</year>). 
<article-title>Parameter optimization and experiment of air-assisted pollination device</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>31</volume>, <fpage>68</fpage>&#x2013;<lpage>75</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11975/j.issn.1002-6819.2015.01.010</pub-id>
</mixed-citation>
</ref>
<ref id="B50">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Sun</surname> <given-names>K.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
<name><surname>Liu</surname> <given-names>S.</given-names></name>
<name><surname>Liu</surname> <given-names>C.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Apple, peach, and pear flower detection using semantic segmentation network and shape constraint level set</article-title>. <source>Comput. Electron. Agric.</source> <volume>185</volume>, <fpage>106150</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2021.106150</pub-id>
</mixed-citation>
</ref>
<ref id="B70">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tacconi</surname> <given-names>G.</given-names></name>
<name><surname>Michelotti</surname> <given-names>V.</given-names></name>
<name><surname>Cacioppo</surname> <given-names>O.</given-names></name>
<name><surname>Vittone</surname> <given-names>G.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Kiwifruit pollination: The interaction between pollen quality, pollination systems and flowering stage</article-title>. <source>J. Berry Res.</source> <volume>6</volume>, <fpage>417</fpage>&#x2013;<lpage>426</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3233/JBR-160138</pub-id>
</mixed-citation>
</ref>
<ref id="B11">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tang</surname> <given-names>C.</given-names></name>
<name><surname>Wang</surname> <given-names>H.</given-names></name>
<name><surname>Li</surname> <given-names>M.</given-names></name>
<name><surname>Li</surname> <given-names>Z.</given-names></name>
<name><surname>Huang</surname> <given-names>Z.</given-names></name>
<name><surname>Luo</surname> <given-names>H.</given-names></name>
<etal/>
</person-group>. (<year>2012</year>). 
<article-title>Research status and development strategies of mechanical pollination in hybrid rice seed production</article-title>. <source>Trans. Chin. Soc Agric. Eng.</source> <volume>28</volume>, <fpage>1</fpage>&#x2013;<lpage>7</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3969/j.issn.1002-6819.2012.20.001</pub-id>
</mixed-citation>
</ref>
<ref id="B29">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tao</surname> <given-names>Z.</given-names></name>
<name><surname>Li</surname> <given-names>K.</given-names></name>
<name><surname>Rao</surname> <given-names>Y.</given-names></name>
<name><surname>Li</surname> <given-names>W.</given-names></name>
<name><surname>Zhu</surname> <given-names>J.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Strawberry maturity recognition based on improved YOLOv5</article-title>. <source>Agronomy</source> <volume>14</volume>, <fpage>460</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14030460</pub-id>
</mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tian</surname> <given-names>H.</given-names></name>
<name><surname>L&#xfc;</surname> <given-names>X.</given-names></name>
<name><surname>Gao</surname> <given-names>Y.</given-names></name>
<name><surname>Tian</surname> <given-names>L.</given-names></name>
<name><surname>Liu</surname> <given-names>S.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
<etal/>
</person-group>. (<year>2019</year>). 
<article-title>Effects of artificial pollination on fruit set rate and fruit growth of Rhododendron</article-title>. <source>Acta Agric. Univ. Jiangxiensis</source> <volume>41</volume>, <fpage>683</fpage>&#x2013;<lpage>690</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.13836/j.jjau.2019079</pub-id>
</mixed-citation>
</ref>
<ref id="B35">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>C.</given-names></name>
<name><surname>Luo</surname> <given-names>T.</given-names></name>
<name><surname>Zhao</surname> <given-names>L.</given-names></name>
<name><surname>Tang</surname> <given-names>Y.</given-names></name>
<name><surname>Zou</surname> <given-names>X.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Window zooming&#x2013;based localization algorithm of fruit and vegetable for harvesting robot</article-title>. <source>IEEE Access</source> <volume>7</volume>, <fpage>103639</fpage>&#x2013;<lpage>103649</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2019.2925812</pub-id>
</mixed-citation>
</ref>
<ref id="B48">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>C.</given-names></name>
<name><surname>Wang</surname> <given-names>Y.</given-names></name>
<name><surname>Liu</surname> <given-names>S.</given-names></name>
<name><surname>Lin</surname> <given-names>G.</given-names></name>
<name><surname>He</surname> <given-names>P.</given-names></name>
<name><surname>Zhang</surname> <given-names>Z.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>Study on pear flowers detection performance of YOLO-PEFL model trained with synthetic target images</article-title>. <source>Front. Plant Sci.</source> <volume>13</volume>, <elocation-id>911473</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2022.911473</pub-id>, PMID: <pub-id pub-id-type="pmid">35747884</pub-id>
</mixed-citation>
</ref>
<ref id="B56">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>X.</given-names></name>
<name><surname>Zhang</surname> <given-names>Q.</given-names></name>
<name><surname>Su</surname> <given-names>A.</given-names></name>
</person-group> (<year>2013</year>). 
<article-title>Application of wasp pollination technology in greenhouse cherry tomatoes</article-title>. <source>North. Hortic.</source> <issue>11</issue>, <fpage>45</fpage>&#x2013;<lpage>47</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11869/hnny.2013.11.045</pub-id>
</mixed-citation>
</ref>
<ref id="B80">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wei</surname> <given-names>J.</given-names></name>
<name><surname>Pan</surname> <given-names>Y.</given-names></name>
<name><surname>Sun</surname> <given-names>L.</given-names></name>
<name><surname>Shang</surname> <given-names>H.</given-names></name>
<name><surname>Chen</surname> <given-names>X.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>A novel redundant cooperative control strategy for robotic pollination</article-title>. <source>Comput. Electron. Agric.</source> <volume>220</volume>, <fpage>108846</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.108846</pub-id>
</mixed-citation>
</ref>
<ref id="B74">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wen</surname> <given-names>C.</given-names></name>
<name><surname>Long</surname> <given-names>J.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Guo</surname> <given-names>W.</given-names></name>
<name><surname>Lin</surname> <given-names>S.</given-names></name>
<name><surname>Liang</surname> <given-names>X.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>A 3D vision-based method for tomato flower pollination positioning</article-title>. <source>Trans. Chin. Soc. Agric. Mach.</source> <volume>53</volume>, <fpage>320</fpage>&#x2013;<lpage>328</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.6041/j.issn.1000-1298.2022.11.036</pub-id>
</mixed-citation>
</ref>
<ref id="B59">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wu</surname> <given-names>J.</given-names></name>
<name><surname>Qin</surname> <given-names>Y.</given-names></name>
<name><surname>Zhao</surname> <given-names>J.</given-names></name>
</person-group> (<year>2008</year>). 
<article-title>Pollen tube growth is affected by exogenous hormones and correlated with hormone changes in styles in Torenia fournieri L</article-title>. <source>Plant Growth Regul.</source> <volume>55</volume>, <fpage>137</fpage>&#x2013;<lpage>148</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s10725-008-9268-5</pub-id>
</mixed-citation>
</ref>
<ref id="B97">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wu</surname> <given-names>Y.</given-names></name>
<name><surname>Fang</surname> <given-names>H.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Agricultural machinery scheduling under time window constraints using deep reinforcement learning</article-title>. <source>Biosyst. Eng.</source> <volume>257</volume>, <fpage>104188</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2025.104188</pub-id>
</mixed-citation>
</ref>
<ref id="B26">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Xie</surname> <given-names>H.</given-names></name>
<name><surname>Dai</surname> <given-names>N.</given-names></name>
<name><surname>Yang</surname> <given-names>X.</given-names></name>
<name><surname>Zhan</surname> <given-names>K.</given-names></name>
<name><surname>Liu</surname> <given-names>J.</given-names></name>
</person-group> (<year>2019</year>). &#x201c;
<article-title>Research on recognition methods of pomelo fruit hanging on trees base on machine vision</article-title>,&#x201d; in <source>2019 ASABE Annual International Meeting</source> (<publisher-loc>St. Joseph, MI, United States</publisher-loc>: 
<publisher-name>American Society of Agricultural and Biological Engineers</publisher-name>), <fpage>1</fpage>.
</mixed-citation>
</ref>
<ref id="B88">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xie</surname> <given-names>Y.</given-names></name>
<name><surname>Zhang</surname> <given-names>T.</given-names></name>
<name><surname>Yang</surname> <given-names>M.</given-names></name>
<name><surname>Lyu</surname> <given-names>H.</given-names></name>
<name><surname>Zou</surname> <given-names>Y.</given-names></name>
<name><surname>Sun</surname> <given-names>Y.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>). 
<article-title>Engineering crop flower morphology facilitates robotization of cross-pollination and speed breeding</article-title>. <source>Cell</source> <volume>188</volume>, <fpage>5809</fpage>&#x2013;<lpage>5830</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cell.2025.07.028</pub-id>, PMID: <pub-id pub-id-type="pmid">40795858</pub-id>
</mixed-citation>
</ref>
<ref id="B94">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xiong</surname> <given-names>J.</given-names></name>
<name><surname>Hu</surname> <given-names>Y.</given-names></name>
<name><surname>Gu</surname> <given-names>X.</given-names></name>
<name><surname>Yang</surname> <given-names>C.</given-names></name>
<name><surname>Cui</surname> <given-names>D.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Inversion of on-tree peach firmness via high-fidelity fruit finite element models and sim-to-real deep transfer learning</article-title>. <source>Biosyst. Eng.</source> <volume>259</volume>, <fpage>104291</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2025.104291</pub-id>
</mixed-citation>
</ref>
<ref id="B86">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xu</surname> <given-names>T.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
<name><surname>Qi</surname> <given-names>Y.</given-names></name>
<name><surname>Zeng</surname> <given-names>J.</given-names></name>
<name><surname>L&#xfc;</surname> <given-names>X.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Liquid pollination experiment of pear trees using multi-rotor drones with horizontal trellis</article-title>. <source>Trans. Chin. Soc. Agric. Mach.</source> <volume>54</volume>, <fpage>136</fpage>&#x2013;<lpage>141</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.6041/j.issn.1000-1298.2023.01.015</pub-id>
</mixed-citation>
</ref>
<ref id="B49">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xu</surname> <given-names>T.</given-names></name>
<name><surname>Qi</surname> <given-names>X.</given-names></name>
<name><surname>Lin</surname> <given-names>S.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Ge</surname> <given-names>Y.</given-names></name>
<name><surname>Li</surname> <given-names>Z.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>A neural network structure with attention mechanism and additional feature fusion layer for tomato flowering phase detection in pollination robots</article-title>. <source>Machines</source> <volume>10</volume>, <fpage>1076</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/machines10111076</pub-id>
</mixed-citation>
</ref>
<ref id="B73">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yang</surname> <given-names>M.</given-names></name>
<name><surname>Lyu</surname> <given-names>H.</given-names></name>
<name><surname>Zhao</surname> <given-names>Y.</given-names></name>
<name><surname>Sun</surname> <given-names>Y.</given-names></name>
<name><surname>Pan</surname> <given-names>H.</given-names></name>
<name><surname>Sun</surname> <given-names>Q.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Delivery of pollen to forsythia flower pistils autonomously and precisely using a robot arm</article-title>. <source>Comput. Electron. Agric.</source> <volume>214</volume>, <fpage>108274</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.108274</pub-id>
</mixed-citation>
</ref>
<ref id="B19">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yang</surname> <given-names>Y.</given-names></name>
<name><surname>Wang</surname> <given-names>Z.</given-names></name>
<name><surname>Liu</surname> <given-names>H.</given-names></name>
<name><surname>Liu</surname> <given-names>H.</given-names></name>
<name><surname>Luo</surname> <given-names>Z.</given-names></name>
<name><surname>Pan</surname> <given-names>Z.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>Classification system for Phalaenopsis and flower area extraction experiment</article-title>. <source>J. Agric. Mech. Res.</source> <volume>44</volume>, <fpage>162</fpage>&#x2013;<lpage>166</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.13427/j.cnki.njyi.2022.10.025</pub-id>
</mixed-citation>
</ref>
<ref id="B60">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yao</surname> <given-names>F.</given-names></name>
<name><surname>Wang</surname> <given-names>Y.</given-names></name>
<name><surname>Hao</surname> <given-names>Y.</given-names></name>
<name><surname>Chen</surname> <given-names>J.</given-names></name>
<name><surname>Li</surname> <given-names>S.</given-names></name>
<name><surname>Zhang</surname> <given-names>X.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>Optimization of pollen tube structural parameters for air-powered hybrid rice seed production pollenizers</article-title>. <source>Trans. Chin. Soc. Agric. Eng.</source> <volume>36</volume>, <fpage>18</fpage>&#x2013;<lpage>25</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11975/j.issn.1002-6819.2020.14.003</pub-id>
</mixed-citation>
</ref>
<ref id="B51">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yu</surname> <given-names>X.</given-names></name>
<name><surname>Kong</surname> <given-names>D. Y.</given-names></name>
<name><surname>Xie</surname> <given-names>X.</given-names></name>
<name><surname>Wang</surname> <given-names>Q.</given-names></name>
<name><surname>Bai</surname> <given-names>X. W.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Target recognition and detection of tomato pollination robot based on deep learning</article-title>. <source>Trans. Chin. Soc. Agric. Eng.</source> <volume>38</volume>, <fpage>129</fpage>&#x2013;<lpage>137</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11975/j.issn.1002-6819.2022.38.016</pub-id>
</mixed-citation>
</ref>
<ref id="B76">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Yuan</surname> <given-names>T.</given-names></name>
<name><surname>Zhang</surname> <given-names>S.</given-names></name>
<name><surname>Sheng</surname> <given-names>X.</given-names></name>
<name><surname>Wang</surname> <given-names>D.</given-names></name>
<name><surname>Gong</surname> <given-names>Y.</given-names></name>
<name><surname>Li</surname> <given-names>W.</given-names></name>
</person-group> (<year>2016</year>). &#x201c;
<article-title>An autonomous pollination robot for hormone treatment of tomato flower in greenhouse</article-title>,&#x201d; in <conf-name>2016 3rd International Conference on Systems and Informatics (ICSAI)</conf-name>. (<publisher-loc>Gwangju, South Korea</publisher-loc>: 
<publisher-name>IEEE</publisher-name>), <fpage>108</fpage>&#x2013;<lpage>113</lpage>.
</mixed-citation>
</ref>
<ref id="B46">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yue</surname> <given-names>Y.</given-names></name>
<name><surname>Zhang</surname> <given-names>W.</given-names></name>
<name><surname>Qi</surname> <given-names>L.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Research on the identification method of soybean flower growth status in the field based on improved YOLOv5</article-title>. <source>J. Chin. Agric. Mech.</source> <volume>45</volume>, <fpage>188</fpage>&#x2013;<lpage>193</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.13733/j.jcam.issn.2095-5553.2024.02.026</pub-id>
</mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>H.</given-names></name>
<name><surname>Zhu</surname> <given-names>T.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Liu</surname> <given-names>S.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Research progress on precision pollination robots for greenhouse tomatoes</article-title>. <source>J. Shenyang Agric. Univ.</source> <volume>55</volume>, <fpage>798</fpage>&#x2013;<lpage>808</lpage>.
</mixed-citation>
</ref>
<ref id="B52">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>J.</given-names></name>
<name><surname>Xie</surname> <given-names>J.</given-names></name>
<name><surname>Zhang</surname> <given-names>F.</given-names></name>
<name><surname>Gao</surname> <given-names>J.</given-names></name>
<name><surname>Yang</surname> <given-names>C.</given-names></name>
<name><surname>Song</surname> <given-names>C.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Greenhouse tomato detection and pose classification algorithm based on improved YOLOv5</article-title>. <source>Comput. Electron. Agric.</source> <volume>216</volume>, <fpage>108519</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.108519</pub-id>
</mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>L.</given-names></name>
<name><surname>Zhang</surname> <given-names>H.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Research progress on ecological functions of pollinating insects</article-title>. <source>Jiangsu Agric. Sci.</source> <volume>43</volume>, <fpage>9</fpage>&#x2013;<lpage>13</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.15889/j.issn.1002-1302.2015.07.003</pub-id>
</mixed-citation>
</ref>
<ref id="B100">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>Q.</given-names></name>
<name><surname>Zhang</surname> <given-names>Z.</given-names></name>
<name><surname>Manzoor</surname> <given-names>S. H.</given-names></name>
<name><surname>Li</surname> <given-names>T.</given-names></name>
<name><surname>Igathinathane</surname> <given-names>C.</given-names></name>
<name><surname>Li</surname> <given-names>W.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>). 
<article-title>A comprehensive review of autonomous flower pollination techniques: Progress, challenges, and future directions</article-title>. <source>Comput. Electron. Agric.</source> <volume>237</volume>, <fpage>110577</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2025.110577</pub-id>
</mixed-citation>
</ref>
<ref id="B65">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>S.</given-names></name>
<name><surname>Cai</surname> <given-names>C.</given-names></name>
<name><surname>Li</surname> <given-names>J.</given-names></name>
<name><surname>Sun</surname> <given-names>T.</given-names></name>
<name><surname>Liu</surname> <given-names>X.</given-names></name>
<name><surname>Tian</surname> <given-names>Y.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). 
<article-title>The airflow field characteristics of the unmanned agricultural aerial system on oilseed rape (Brassica napus) canopy for supplementary pollination</article-title>. <source>Agronomy</source> <volume>11</volume>, <fpage>2035</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy11102035</pub-id>
</mixed-citation>
</ref>
<ref id="B12">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>X.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
<name><surname>Wen</surname> <given-names>Y.</given-names></name>
<name><surname>Meng</surname> <given-names>L.</given-names></name>
<name><surname>Tang</surname> <given-names>Y.</given-names></name>
<name><surname>Li</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Effects of three pollination isolation methods on hybrid seed production of pepper</article-title>. <source>J. China Capsicum</source> <volume>21</volume>, <fpage>17</fpage>&#x2013;<lpage>19</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.16847/j.cnki.issn.1672-4542.2023.04.002</pub-id>
</mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhao</surname> <given-names>M.</given-names></name>
<name><surname>Hu</surname> <given-names>X.</given-names></name>
<name><surname>Chang</surname> <given-names>C.</given-names></name>
<name><surname>Meng</surname> <given-names>Z.</given-names></name>
<name><surname>Su</surname> <given-names>H.</given-names></name>
<name><surname>Duan</surname> <given-names>P.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Impact of a vertical heat exchanger tube on thermal environment in an arched greenhouse</article-title>. <source>Acta Energ. Sol. Sin.</source> <volume>44</volume>, <fpage>257</fpage>&#x2013;<lpage>263</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.19912/j.0254-0096.tynxb.2022-0339</pub-id>
</mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhao</surname> <given-names>Z.</given-names></name>
<name><surname>Liang</surname> <given-names>H.</given-names></name>
<name><surname>Kang</surname> <given-names>X.</given-names></name>
<name><surname>Xu</surname> <given-names>C.</given-names></name>
<name><surname>Wang</surname> <given-names>L.</given-names></name>
<name><surname>Jia</surname> <given-names>B.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Effects of summer shading on greenhouse energy balance and pepper growth and quality</article-title>. <source>North. Hortic.</source> <issue>4</issue>, <fpage>51</fpage>&#x2013;<lpage>55</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.14088/j.cnki.issn0439-8114.2022.04.011</pub-id>
</mixed-citation>
</ref>
<ref id="B57">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zou</surname> <given-names>T. T.</given-names></name>
<name><surname>Lyu</surname> <given-names>S. T.</given-names></name>
<name><surname>Jiang</surname> <given-names>Q. L.</given-names></name>
<name><surname>Shang</surname> <given-names>S. H.</given-names></name>
<name><surname>Wang</surname> <given-names>X. F.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Pre-and post-pollination barriers between two exotic and five native Sagittaria species: Implications for species conservation</article-title>. <source>Plant Divers.</source> <volume>45</volume>, <fpage>456</fpage>&#x2013;<lpage>468</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.pld.2022.10.001</pub-id>, PMID: <pub-id pub-id-type="pmid">37601545</pub-id>
</mixed-citation>
</ref>
<ref id="B1">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zou</surname> <given-names>X.</given-names></name>
<name><surname>Ma</surname> <given-names>Y.</given-names></name>
<name><surname>Dai</surname> <given-names>X.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
<name><surname>Yang</surname> <given-names>S.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>The spread and industrial development of chili in China</article-title>. <source>Acta Hortic. Sin.</source> <volume>47</volume>, <fpage>1715</fpage>&#x2013;<lpage>1726</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.16420/j.issn.0513-353x.2020-0103</pub-id>
</mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn id="n1" fn-type="custom" custom-type="edited-by">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1107972">Ning Yang</ext-link>, Jiangsu University, China</p></fn>
<fn id="n2" fn-type="custom" custom-type="reviewed-by">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2873839">Seyed Jalaleddin Mousavirad</ext-link>, Mid Sweden University, Sweden</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3361622">Tianyang Lu</ext-link>, The University of Manchester, United Kingdom</p></fn>
</fn-group>
</back>
</article>