<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="review-article" dtd-version="1.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2025.1734507</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Review</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Technology-driven approaches to intelligent mechanical weed control: a systematic review for sustainable weed management</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Das</surname><given-names>Samriddha</given-names></name>
<uri xlink:href="https://loop.frontiersin.org/people/3259506/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Upadhyay</surname><given-names>Arjun</given-names></name>
<uri xlink:href="https://loop.frontiersin.org/people/3211345/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Sun</surname><given-names>Xin</given-names></name>
<xref ref-type="corresp" rid="c001"><sup>*</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/958444/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
</contrib>
</contrib-group>
<aff id="aff1"><institution>Department of Agricultural and Biosystems Engineering, North Dakota State University</institution>, <city>Fargo</city>, <state>ND</state>,&#xa0;<country country="US">United States</country></aff>
<author-notes>
<corresp id="c001"><label>*</label>Correspondence: Xin Sun, <email xlink:href="mailto:xin.sun@ndsu.edu">xin.sun@ndsu.edu</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2025-12-18">
<day>18</day>
<month>12</month>
<year>2025</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2025</year>
</pub-date>
<volume>16</volume>
<elocation-id>1734507</elocation-id>
<history>
<date date-type="received">
<day>29</day>
<month>10</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>24</day>
<month>11</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>17</day>
<month>11</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2025 Das, Upadhyay and Sun.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Das, Upadhyay and Sun</copyright-holder>
<license>
<ali:license_ref start_date="2025-12-18">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>The intensifying global demand for sustainable agriculture has necessitated innovation in weed management, particularly through intelligent, non-chemical alternatives. Among these, smart mechanical weeding systems integrating artificial intelligence (AI), machine vision, and robotics are emerging as transformative tools for precise and eco-friendly weed control. While several recent reviews have examined intelligent weeding or machine vision-based weed management more broadly, a comprehensive and systematically structured synthesis focusing specifically on AI-driven mechanical weeding systems that integrate both vision and robotic actuation remains limited. This study presents a systematic review of 176 technical papers published between 2000 and 2024, with in-depth analysis of 33 key works, aiming to explore the design and performance of intelligent mechanical weed control systems in precision agriculture. The review investigates foundational mechanical weeding methods, recent advances in sensor integration and weed detection algorithms, and the use of robotic platforms for intra- and inter-row weeding. It highlights the critical role of RGB, LiDAR, hyperspectral sensors, and deep learning models in enabling real-time, selective weed removal. Comparative case studies showcase end effectors, control architecture, sensors, and techniques involved across diverse platforms. While significant progress has been made, challenges persist in weed-crop differentiation, model generalization, real-time actuation, and economic feasibility. The review proposes a set of design and operational guidelines addressing sensor fusion, adaptive tooling, platform modularity, and user-centric interfaces. 
This work provides a targeted, system-level roadmap for researchers, developers, and stakeholders in agricultural robotics, offering insights into current capabilities, gaps, and future directions to advance intelligent mechanical weeding for scalable and sustainable food production.</p>
</abstract>
<kwd-group>
<kwd>artificial intelligence</kwd>
<kwd>automated weeding systems</kwd>
<kwd>machine vision</kwd>
<kwd>precision agriculture</kwd>
<kwd>robotic weed control</kwd>
</kwd-group>
<funding-group>
<award-group id="gs1">
<funding-source id="sp1">
<institution-wrap>
<institution>National Institute of Food and Agriculture</institution>
<institution-id institution-id-type="doi" vocab="open-funder-registry" vocab-identifier="10.13039/open_funder_registry">10.13039/100005825</institution-id>
</institution-wrap>
</funding-source>
<award-id rid="sp1">2024-67022-42001, ND01487</award-id>
</award-group>
<award-group id="gs2">
<funding-source id="sp2">
<institution-wrap>
<institution>U.S. Department of Agriculture</institution>
<institution-id institution-id-type="doi" vocab="open-funder-registry" vocab-identifier="10.13039/open_funder_registry">10.13039/100000199</institution-id>
</institution-wrap>
</funding-source>
</award-group>
<funding-statement>The author(s) declare that financial support was received for the research and/or publication of this article. This review study is based upon work partially supported by the USDA-ARS Sustainable Sugar Beet Research Initiative, FAR0037112. This research was also supported in part by the intramural research program of the U.S. Department of Agriculture, National Institute of Food and Agriculture (The Agriculture and Food Research Initiative) (Award # 2024-67022-42001). Any opinions, findings, conclusions or recommendations expressed in this publication are those of the author(s) and do not necessarily reflect the view of the U.S. Department of Agriculture. This work is/was supported by the USDA National Institute of Food and Agriculture, Hatch project number ND01487.</funding-statement>
</funding-group>
<counts>
<fig-count count="14"/>
<table-count count="2"/>
<equation-count count="3"/>
<ref-count count="130"/>
<page-count count="25"/>
<word-count count="11333"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Sustainable and Intelligent Phytoprotection</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Weeding is the process of removing unnecessary plants from farming and agricultural lands to prevent them from competing with desired ones over natural resources. These plants are generally recognized by their unpredictable growing locations along with their ability to reproductively proliferate without human intervention (<xref ref-type="bibr" rid="B34">Gao and Su, 2024</xref>). The existence of weeds adversely affects crop yield and leads to irregular maturation of crops, thereby complicating the harvesting process due to their disruption of consistent crop growth (<xref ref-type="bibr" rid="B111">Tshewang et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B127">Yu et&#xa0;al., 2019</xref>). For instance, weeds result in considerable yield losses in wheat, with winter wheat losses in the United States averaging 25.6%. This, combined with a 23.4% loss in Canada, culminates in a potential annual economic deficit of approximately $2.19 billion. Furthermore, for spring wheat, the losses average 33.2% in the United States and 19.5% when combined with Canada, incurring costs up to $1.39 billion. This situation emphasizes the escalating threat posed by weeds (<xref ref-type="bibr" rid="B31">Flessner et&#xa0;al., 2021</xref>). Effective weed management is imperative to alleviate the challenges presented by these plants and to enhance crop productivity while sustaining agricultural profitability. Although these practices are labor-intensive and costly, often necessitating substantial manpower and resources, there are instances where the expenses related to weed management may exceed the economic benefits derived from crop production. This highlights the necessity to assess traditional weed management practices and their inherent limitations (<xref ref-type="bibr" rid="B29">Dhakal et&#xa0;al., 2024</xref>).</p>
<p>The selection of weeding practices and implements is influenced by factors such as crop type, soil characteristics, and field conditions. Hand-weeding is commonly adopted for smaller landholdings, provided there is sufficient labor availability (<xref ref-type="bibr" rid="B3">Abebe, 2024</xref>). Broadly, traditional weeding methods can be categorized into three distinct approaches: physical, chemical, and biological. Each approach has specific limitations, based on their operational efficiency, environmental impact, and cost-effectiveness (<xref ref-type="bibr" rid="B16">Bond et&#xa0;al., 2003</xref>; <xref ref-type="bibr" rid="B34">Gao and Su, 2024</xref>). Physical weeding techniques involve thermal technologies, such as laser and flame weeding, which effectively control weeds but also present several challenges. The high temperatures generated in these processes can ignite dry materials in the field, creating fire hazards. Additionally, these methods can pose risks to humans and animals nearby. Flame weeding, in particular, significantly contributes to greenhouse gas emissions, raising environmental concerns due to its detrimental ecological impact (<xref ref-type="bibr" rid="B103">Sivesind et&#xa0;al., 2009</xref>). Mechanical weeding is another physical weeding technique and is a widely used traditional practice, with reported average weed removal efficiencies of approximately 80% based on reductions in both weed density and biomass (<xref ref-type="bibr" rid="B43">Jabran and Chauhan, 2015</xref>; <xref ref-type="bibr" rid="B61">Liu et&#xa0;al., 2023</xref>). However, it also has certain drawbacks. Tools like chain harrows, used for inter-row weeding, can physically damage crops, leading to bruising and stem breakage, which may inhibit plant growth and increase vulnerability to pathogen infestations. Moreover, the heavy design of mechanical weeding equipment can cause soil compaction, adversely affecting soil aeration and root development. 
Biological weed control employs living organisms, such as insects, fungi, or bacteria, or their byproducts, to suppress weed populations and lessen their impact on crops. Although this method aligns well with environmental principles, its practical application is limited by slow activation and response times, alongside the short half-life of these biological agents. Additionally, the use of bioherbicides derived from these organisms poses potential risks to human and animal health and may have unintended environmental repercussions, thus limiting their widespread adoption in sustainable agricultural systems. Chemical weed control remains the most effective and widely used method for managing weeds, primarily due to the effectiveness of herbicides against various weed populations. However, its extensive application brings significant drawbacks. Chemical herbicides can be quite expensive, increasing the financial burden on agricultural production. Prolonged exposure to these chemicals presents serious health risks to both humans and animals. For example, glyphosate, a widely used herbicide, was classified as &#x201c;probably carcinogenic to humans&#x201d; by the World Health Organization in 2015 (<xref ref-type="bibr" rid="B114">Van Bruggen et&#xa0;al., 2018</xref>). Furthermore, the significant use of chemical herbicides leads to environmental pollution, including soil contamination, water runoff, and harm to non-target organisms, raising concerns about their sustainability in agricultural practices.</p>
<p>To overcome the limitations of conventional weeding methods, intelligent weeding technologies have emerged as a key component of modern precision agriculture. Unlike traditional Integrated Weed Management (IWM), which combines general mechanical and chemical strategies, intelligent weeding integrates automation, sensing and artificial intelligence to achieve targeted, data-driven weed control (<xref ref-type="bibr" rid="B92">Riemens et&#xa0;al., 2022</xref>). Depending on the mode of actuation, intelligent weeding can be categorized into intelligent chemical, intelligent physical and intelligent mechanical approaches (<xref ref-type="bibr" rid="B35">Gerhards et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B48">Jiao et&#xa0;al., 2024</xref>). Intelligent chemical weeding employs site-specific or variable rate herbicide applications guided by machine vision, thereby reducing chemical usage (<xref ref-type="bibr" rid="B113">Upadhyay et&#xa0;al., 2024a</xref>). Intelligent physical weeding utilizes non-chemical energy sources &#x2013; such as lasers or thermal radiation &#x2013; to destroy weeds relying on imaging technologies (<xref ref-type="bibr" rid="B9">Bajwa et&#xa0;al., 2015</xref>; T. <xref ref-type="bibr" rid="B50">Jin and Han, 2024</xref>; <xref ref-type="bibr" rid="B60">Li et&#xa0;al., 2022</xref>). Meanwhile intelligent mechanical weeding physically removes or disrupts weeds using robotic end effectors and machine vision, offering an eco-friendly and residue free alternative (<xref ref-type="bibr" rid="B45">Jiang et&#xa0;al., 2023b</xref>; <xref ref-type="bibr" rid="B117">Visentin et&#xa0;al., 2023</xref>). Collectively, these intelligent systems align with sustainability and resource optimization objectives by minimizing chemical inputs, improving accuracy, and reducing labor requirements.</p>
<p>Despite the benefits of intelligent weed management, the escalating occurrence of herbicide resistant weeds has intensified the need for sustainable, non-chemical control strategies (<xref ref-type="bibr" rid="B28">D&#xe9;lye et&#xa0;al., 2013</xref>). Although technologies such as variable-rate spraying, site specific delivery and see and spray systems have improved herbicide efficiency, they can inadvertently accelerate the evolution of resistant weed biotypes (<xref ref-type="bibr" rid="B21">Chang et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B113">Upadhyay et&#xa0;al., 2024a</xref>). This growing resistance, combined with the limited pipeline of new herbicides, underscores the urgency of adopting other non-chemical solutions (<xref ref-type="bibr" rid="B75">Nath et&#xa0;al., 2024</xref>). In this context, intelligent mechanical weeding stands out as a promising direction, integrating the precision of robotics with the selectivity of machine vision to address both inter- and intra-row weeds effectively. As a non-chemical method, it not only mitigates herbicide resistance, but also reduces soil and water contamination, fuel consumption and overall environmental burden (<xref ref-type="bibr" rid="B62">Machleb et&#xa0;al., 2020</xref>).</p>
<p>Mechanical weed control involves the physical removal or destruction of weeds through direct interaction with the soil and vegetation. The primary techniques employed in mechanical weeding include tillage, cutting, and pulling (<xref ref-type="bibr" rid="B25">Cloutier et&#xa0;al., 2007</xref>; <xref ref-type="bibr" rid="B42">Hussain et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B66">McCool et&#xa0;al., 2018</xref>). These methods disrupt weed growth by turning the soil and uprooting root systems thereby inflicting lethal injuries that prevent regrowth (<xref ref-type="bibr" rid="B128">Zawada et&#xa0;al., 2023</xref>). Their effectiveness largely depends on the method, timing, and intensity of operation (<xref ref-type="bibr" rid="B62">Machleb et&#xa0;al., 2020</xref>). Despite their effectiveness, conventional mechanical weeding approaches often lead to crop damage followed by soil compaction, adversely affecting soil aeration and root development (<xref ref-type="bibr" rid="B78">Pannacci et&#xa0;al., 2017</xref>). Moreover, the performance of mechanical weeders is influenced by field conditions and weather variability, which limit their adaptability across diverse agricultural contexts. Additional issues such as higher energy, fuel consumption and labor demand also reduce their economic feasibility (<xref ref-type="bibr" rid="B42">Hussain et&#xa0;al., 2018</xref>).</p>
<p>Recent advances have integrated automation, robotics, and computer vision into mechanical weeding, giving rise to intelligent mechanical weeders. These systems employ sensors and AI algorithms for real-time weed detection and selective removal, minimizing crop disturbance and operator dependency. Specialized end effectors such as finger weeders, rotary cultivators, and elastic comb mechanisms effectively manage complex intra-row weeds that were difficult to control manually (<xref ref-type="bibr" rid="B22">Chang et&#xa0;al., 2021</xref>). For instance, the SMART CULTIVATOR by Stout Industrial Technology uses True Vision software for crop recognition and adaptive blade control. It handles diverse crops including artichokes, broccoli, cabbage, and pumpkins&#x2014;with 99% identification accuracy, 1-2 <inline-formula>
<mml:math display="inline" id="im1"><mml:mrow><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mo stretchy="false">/</mml:mo><mml:mi>h</mml:mi></mml:mrow></mml:math></inline-formula> field coverage, and up to 96% labor reduction compared with manual weeding (<xref ref-type="bibr" rid="B106">Stout, 2020</xref>). These systems perform best at early weed-growth stages, roughly two to three weeks after sowing, when weeds are spatially distinct (<xref ref-type="bibr" rid="B43">Jabran and Chauhan, 2015</xref>). Under dense weed canopies, their precision declines, favoring hybrid strategies that pair conventional tools (e.g., harrows or hoes) for inter-row control with AI-based weeders for intra-row precision&#x2014;maximizing efficiency, minimizing environmental impact, and advancing sustainable, resource-optimized agriculture.</p>
<p>This review therefore aims to critically examine the potential and limitations of intelligent mechanical weed control, focusing on its integration with advanced sensing, perception, and actuation systems. It further identifies the technological gaps, operational bottlenecks, and future research pathways required to develop scalable, efficient, and sustainable robotic solutions that align with the broader goals of precision agriculture.</p>
<p>The recent reviews in weed management have predominantly adopted a comprehensive approach, encompassing the entire spectrum of available weed management techniques. These reviews typically provide an overview of various methods, highlighting their advantages, use cases, and technical findings, offering a holistic perspective on weed management challenges and solutions. However, most of these studies are dedicated to exploring the use of ground robots and UAVs for general weed management, often covering a broad range of aspects without delving deeply into specific techniques. Additionally, these reviews frequently separate the discussion of weed removal and weed detection, even though an integrated approach addressing both domains is critical for developing effective, sustainable solutions.</p>
<p>This paper aims to address the existing research gap by focusing on a critical aspect of weed management&#x2014;advancements and emerging trends in mechanical weed removal techniques for precise and intelligent weed removal. It demonstrates the use of imaging technologies for accurately localizing and positioning weeds, a foundational component of intelligent mechanical weeding systems. Furthermore, the review incorporates detailed case studies that illustrate prevailing trends and methodologies, along with a comparative analysis of the performance of platforms employing these technologies. By adopting this focused approach, the paper emphasizes the potential of integrating weed detection and removal into a cohesive system, offering valuable insights into contemporary precision agriculture practices.</p>
</sec>
<sec id="s2">
<label>2</label>
<title>Methodology</title>
<p>In the context of a literature review, two primary approaches are commonly employed: systematic review and narrative review. The systematic approach follows a structured methodology that addresses specific technical questions by systematically analyzing relevant research papers. It provides a comprehensive synthesis of results, methodologies, and key findings from existing studies. Conversely, the narrative approach focuses on providing a theoretical background and conceptual understanding of the subject matter, primarily adopting a qualitative perspective rather than a quantitative one (<xref ref-type="bibr" rid="B95">Rother, 2007</xref>). This review adopted a systematic approach guided by the PRISMA 2020 guidelines to ensure transparency, reproducibility, and methodological rigor (<xref ref-type="bibr" rid="B77">Page et&#xa0;al., 2021</xref>). This approach was chosen over a narrative approach to enable structured identification, screening, and synthesis of peer-reviewed studies addressing both traditional and smart mechanical weed control systems.</p>
<p>The overall workflow of the review from topic definition through article extraction and synthesis is summarized in <xref ref-type="fig" rid="f1"><bold>Figure&#xa0;1</bold></xref>. To retrieve relevant literature, two primary databases&#x2014;Google Scholar and Web of Science&#x2014;were used between October 2024 and January 2025. These databases were selected for their broad interdisciplinary coverage spanning precision agriculture, agricultural robotics, and artificial intelligence. Cross verification confirmed that most relevant studies, indexed in other major engineering/computer-science databases, were already represented within these two databases, ensuring comprehensive inclusion while minimizing redundancy.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Review paper flowchart layout.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g001.tif">
<alt-text content-type="machine-generated">Flowchart detailing the process for reviewing technical approaches to intelligent mechanical weed control. It begins with defining a review topic, followed by database selection and applying search criteria. Abstracts are read, and categories are defined for organizing the study. Categories include background advancements, fundamental elements, and technical challenges. Data is extracted on methods, techniques, and integration, leading to a conclusion summarizing key findings and contributions. Different sections are color-coded: green for research methodology, blue for review outline, yellow for data presentation, and pink for the conclusion.</alt-text>
</graphic></fig>
<p>A Boolean keyword-based strategy was adopted to capture the widest possible scope of research activity: (&#x201c;mechanical weeding&#x201d; OR &#x201c;mechanical weed control&#x201d; OR &#x201c;mechanical weeder&#x201d;) AND (&#x201c;computer vision&#x201d; OR &#x201c;machine vision&#x201d; OR &#x201c;image processing&#x201d; OR &#x201c;deep learning&#x201d; OR &#x201c;artificial intelligence&#x201d; OR &#x201c;vision&#x201d;).</p>
<p>The initial research retrieved 11,275 publications (Google Scholar = 7430; Web of Science = 3845). Duplicate records were removed through automatic deduplication followed by manual verification yielding 9862 unique articles. Titles and abstracts were screened according to predefined inclusion and exclusion criteria (<xref ref-type="table" rid="T1"><bold>Table&#xa0;1</bold></xref>). The process involved multiple iterations to verify relevance.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Eligibility criteria and selection process for literature review.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">Criterion</th>
<th valign="middle" align="left">Considerations/procedures</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">Eligibility criteria</td>
<td valign="middle" align="left">Peer-reviewed English-language studies 2000-2024 (total records = 11275)</td>
</tr>
<tr>
<td valign="middle" align="left">Database search and keywords</td>
<td valign="middle" align="left">Databases: Google Scholar and Web of Science.<break/>Search query: (&#x201c;mechanical weeding&#x201d; OR &#x201c;mechanical weed control&#x201d; OR &#x201c;mechanical weeder&#x201d;) AND (&#x201c;computer vision&#x201d; OR &#x201c;machine vision&#x201d; OR &#x201c;image processing&#x201d; OR &#x201c;deep learning&#x201d; OR &#x201c;artificial intelligence&#x201d; OR &#x201c;vision&#x201d;).</td>
</tr>
<tr>
<td valign="middle" align="left">Exclusion criteria</td>
<td valign="middle" align="left">&#x2022;&#x2003;Duplicate and non-English papers<break/>&#x2022;&#x2003;Review articles<break/>&#x2022;&#x2003;Economic or non-mechanical weed-control studies<break/>&#x2022;&#x2003;Non-technical reports</td>
</tr>
<tr>
<td valign="middle" align="left">Quality assessment</td>
<td valign="middle" align="left">Five-domain rubric (design clarity, sensor/actuator details, data availability, validation method, completion report)</td>
</tr>
<tr>
<td valign="middle" align="left">Final dataset analysis</td>
<td valign="middle" align="left">176 eligible studies were retained for trend analysis, out of which 33 quality papers were selected for detailed review and comparison</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>After screening, 2705 articles were retained for full-text evaluation. Studies not written in English, review papers, purely economic assessments or those unrelated to mechanical or intelligent mechanical weeding were excluded. Ultimately, 176 technical papers directly addressing mechanical or automated weeding were included for quantitative analysis. From this dataset, 33 focal studies were selected for in-depth synthesis based on a quality scoring rubric that assessed five dimensions: (i) design clarity, (ii) details of sensor/actuator integration, (iii) availability of algorithmic or performance data, (iv) validation method, and (v) completion report. Each study was rated on a five-point scale (1 = low detail to 5 = comprehensive) with the highest scoring studies forming the analytical subset for further discussion. The complete identification and selection process has been illustrated in the PRISMA flow diagram, <xref ref-type="fig" rid="f2"><bold>Figure&#xa0;2</bold></xref>.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Flow diagram for study selection and inclusion.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g002.tif">
<alt-text content-type="machine-generated">Flowchart illustrating a review article screening process: Initially, 11,275 records were identified through database searches in Google Scholar and Web of Science. After removing 1,413 duplicates, 9,862 records were screened by title and abstract; 7,157 were excluded. Full-text assessments of 2,705 articles were performed, excluding 2,529 for reasons like non-related topics, language issues, review papers, economic analysis, or incomplete records. The process culminated in 176 studies for quantitative synthesis and 33 for qualitative synthesis.</alt-text>
</graphic></fig>
<p>Data extracted from the selected papers included mechanical actuation type (rotary, finger, blade, linear, etc.), sensing and detection technology (RGB, LiDAR, multispectral, hyperspectral), robotic actuation configuration, and key performance indicators such as precision, accuracy, and operational speed. Potential sources of bias were qualitatively evaluated by examining the transparency of experimental design, availability of performance data and validation consistency across studies. To contextualize the growth of research in this domain, <xref ref-type="fig" rid="f3"><bold>Figure&#xa0;3</bold></xref> illustrates the trend in peer-reviewed publications on traditional and mechanical weeding between 2000 and 2024. The data show an increasing trend in work related to intelligent mechanical weeding after 2019, reflecting the rapid adoption of smart technologies in agricultural automation.</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>The trend in published peer-reviewed articles on mechanical weed control (2000-2024).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g003.tif">
<alt-text content-type="machine-generated">Bar chart comparing the number of publications on smart mechanical weeding and traditional mechanical weeding from 2000 to 2024. Green bars represent traditional weeding, peaking in 2015, with fluctuating trends. Blue bars represent smart weeding, showing increased publications after 2014, with upward trends continuing through 2024.</alt-text>
</graphic></fig>
<p>Several previous review studies have investigated different aspects of weed management, including weed control techniques, sensor-based weed detection, robotic weeding systems, and artificial intelligence-driven weed detection approaches (<xref ref-type="bibr" rid="B34">Gao and Su, 2024</xref>; <xref ref-type="bibr" rid="B62">Machleb et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B88">Rai et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B112">Upadhyay et&#xa0;al., 2024b</xref>). However, given the breadth of existing literature, the present review adopts a more specialized focus on smart mechanical weeding technologies, emphasizing recent advancements, their integration with imaging sensors for automation and actuation and the technical challenges involved.</p>
</sec>
<sec id="s3">
<label>3</label>
<title>Mechanical weeding: background, evolution, and key components</title>
<p>Mechanical weeding, a practice that employs tools, implements, and machinery for weed control, has been a cornerstone of agricultural weed management since ancient times, complementing manual hand-pulling. This method has demonstrated high effectiveness in eliminating weeds while ensuring no chemical residues are left on crops (<xref ref-type="bibr" rid="B78">Pannacci et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B130">Zimdahl, 2018</xref>). The mechanical approach primarily involves processes such as cutting, burying, or uprooting weeds, effectively destroying these undesired plants (<xref ref-type="bibr" rid="B42">Hussain et&#xa0;al., 2018</xref>).</p>
<sec id="s3_1">
<label>3.1</label>
<title>Background and historical advancements</title>
<p>A wide range of mechanical weeding tools and equipment has been utilized over time, including hoes, split-hoes, brush weeders, robotic weeders, row crop cultivators, finger weeders, and tine harrows (<xref ref-type="bibr" rid="B68">Mehdizadeh and Mushtaq, 2020</xref>). Among these, certain tools, such as flex tines are designed for manual operation, whereas equipment like harrows, rotary hoes and weeders are typically tractor-mounted or automated systems. <xref ref-type="fig" rid="f4"><bold>Figure&#xa0;4</bold></xref> gives a demonstration of the popular and widely used mechanical weeding tools which have been in practice over the years. Although alternative weed management techniques exist, mechanical weeding offers distinct advantages as a non-chemical, environmentally friendly approach that avoids pollution while enhancing soil health by loosening and improving soil fertility (<xref ref-type="bibr" rid="B61">Liu et&#xa0;al., 2023</xref>).</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Types of mechanical weeders &#x2013; <bold>(a)</bold> rotating hoe weeder (<xref ref-type="bibr" rid="B110">Trajkovski et&#xa0;al., 2024</xref>), <bold>(b)</bold> inter-row hoe (<xref ref-type="bibr" rid="B6">Alagbo et&#xa0;al., 2022</xref>), <bold>(c)</bold> roll hackle and finger weeder (<xref ref-type="bibr" rid="B32">Gagliardi et&#xa0;al., 2023</xref>), <bold>(d)</bold> flex tine weeder (<xref ref-type="bibr" rid="B62">Machleb et&#xa0;al., 2020</xref>) <bold>(e)</bold> cycloid hoe weeder (<xref ref-type="bibr" rid="B96">Rueda-Ayala et&#xa0;al., 2010</xref>), <bold>(f)</bold> sweep type cultivator weeder (<xref ref-type="bibr" rid="B128">Zawada et&#xa0;al., 2023</xref>), <bold>(g)</bold> basket weeder (<xref ref-type="bibr" rid="B40">Hoidal, 2019</xref>), <bold>(h)</bold> manual push blade weeder (<xref ref-type="bibr" rid="B90">Rajashekar et&#xa0;al., 2014</xref>), <bold>(i)</bold> Bourquin Organic Weedpuller (<xref ref-type="bibr" rid="B71">Moore et&#xa0;al., 2023</xref>), <bold>(j)</bold> inter-row cultivator weeder (<xref ref-type="bibr" rid="B30">Fennimore et&#xa0;al., 2013</xref>), <bold>(k)</bold> torsion weeder (<xref ref-type="bibr" rid="B79">Pannacci et&#xa0;al., 2018</xref>), <bold>(l)</bold> vertical tine eco-weeder (<xref ref-type="bibr" rid="B4">Ahmad et&#xa0;al., 2014</xref>).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g004.tif">
<alt-text content-type="machine-generated">A collage of various agricultural machinery (a) a green rotating hoe weeder, (b) a red and blue inter row hoe with no-till sweeps, (c) A tractor with roll and finger weeder, (d) A front view of flex-tine weeders, (e) a red cycloidal hoe weeder, (f) sweep type cultivator weeder with wheels, (g) red rotary basket weeder on a field, (h) orange manual push blade weeder, (i) tractor mounted weed puller, (j) Inter-row cultivator weeder, (k) close view of torsion weeder  working between plants. (l) vertical tine eco-weeder.</alt-text>
</graphic></fig>
<p>Traditionally, mechanical weeding relied on tillage operations using tools such as cultivators and rotavators, often integrated with chemical herbicides to improve weed eradication efficiency (<xref ref-type="bibr" rid="B53">Kouwenhoven et&#xa0;al., 1991</xref>). A 4-year study on newly planted pecans demonstrated the benefits of this integrated approach, achieving the highest tree diameter increase of 384% with comprehensive herbicide-based weed control, compared to 224% for mowing and 229% for untreated plots.</p>
<p>Disking and selective grass control resulted in 339% and 292% increases, respectively, while irrigation further enhanced cumulative diameter growth to 316% compared to 271% without irrigation (<xref ref-type="bibr" rid="B82">Patterson et&#xa0;al., 1990</xref>). However, rising environmental concerns and awareness of the adverse impacts of chemical herbicides have driven a shift towards smart and reduced use of herbicides, leading to the development of advanced machinery and techniques (<xref ref-type="bibr" rid="B81">Parish, 1990</xref>). For instance, integrating reduced herbicide rates with interrow cultivation in conservation tillage systems using rotary hoeing for corn effectively controlled weeds and maintained yields comparable to full-rate treatments, providing a sustainable alternative to large scale herbicide usage (<xref ref-type="bibr" rid="B18">Buhler et&#xa0;al., 1995</xref>). In another scenario, a mechanical weeder equipped with ground-contoured-following pressing-grass floats (GPF) and weeding rollers, achieved average weeding rates of ~87% in a two-season experiment in a paddy field (<xref ref-type="bibr" rid="B47">Jiao et&#xa0;al., 2022</xref>). Field experiments in soybean and sugar beet showed inter row hoeing increased weed control efficacy by 89% in soybean and 87% in sugar beet compared to the conventional methods. Precision hoeing increased the yields by 23 and 28% for sugar beet and soybean respectively (<xref ref-type="bibr" rid="B55">Kunz et&#xa0;al., 2015</xref>). <xref ref-type="fig" rid="f5"><bold>Figure&#xa0;5</bold></xref> presents a chronological timeline illustrating how mechanical weeding technologies have evolved over the decades&#x2014;from early manual implements to sophisticated automated and robotic systems&#x2014;highlighting the progression that has shaped modern intelligent mechanical weeding.</p>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Timeline for advancement in mechanical weeding over the decades.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g005.tif">
<alt-text content-type="machine-generated">Timeline illustrating the evolution of weeding methods. Before the 1800s: manual weeding with hands. 1800s: hoeing. 1900s: farm mechanization with machines. 2000s: sensor-based mechanical weeding. Since 2015: AI and machine vision-based robotic mechanical weeding.</alt-text>
</graphic></fig>
<p>In another experiment, a tractor drawn inter and intra row weeding system was developed for field crops combining active rotary tines for intra row weeding and passive tines for inter row weeding achieving weed mortality of 92.8% in maize and 84.1% in pigeon pea, with plant damage under 6% (<xref ref-type="bibr" rid="B20">Chandel et&#xa0;al., 2021</xref>).</p>
<p>The efficacy of mechanical weeding in controlling weed populations and enhancing crop yields, as demonstrated in various studies, underscores its significant role in promoting sustainable agricultural practices. Nevertheless, despite its numerous advantages, mechanical weeding is not devoid of challenges. These obstacles include: a) substantial initial investment and ongoing maintenance costs, in addition to the necessity for skilled labor; b) improper operation, which can result in considerable crop damage; c) uneven terrain that diminishes operational efficiency; d) limited effectiveness within crop rows, particularly for densely planted or closely spaced crops; e) the requirement for precise application timing, which is frequently impacted by weather conditions and stages of crop development (<xref ref-type="bibr" rid="B34">Gao and Su, 2024</xref>; <xref ref-type="bibr" rid="B115">van der Schans et&#xa0;al., 2006</xref>).</p>
<p>Although conventional mechanical weeding techniques work well for large-scale inter-row weed control with little disruption to crops, weeding within rows continues to pose a significant challenge. To overcome this issue, advancements in intelligent mechanical weeding systems have emerged, incorporating smart technologies to improve precision and efficiency. These systems more effectively address intra-row weeds while reducing labor-intensive tasks, providing a promising solution for contemporary agriculture (<xref ref-type="bibr" rid="B86">Quan et&#xa0;al., 2022</xref>). The next section delves into the essential elements and innovations that are propelling intelligent mechanical weeding methods forward.</p>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Fundamental elements of intelligent mechanical weeding</title>
<p>Intelligent mechanical weeding is an advanced technique that employs technologies such as computer vision, sensors, and precision actuation to detect, identify, and differentiate weeds from crops, enabling targeted weed removal without the use of chemicals (<xref ref-type="bibr" rid="B124">Xiang et&#xa0;al., 2024</xref>). Unlike traditional mechanical weeding, which predominantly targets inter-row weeds and relies on generalized tillage or cutting methods, intelligent weeding systems address both inter-row and intra-row weeds with greater precision (<xref ref-type="bibr" rid="B69">Melander et&#xa0;al., 2015</xref>). These systems utilize two primary approaches: one involves actively recognizing and removing weeds through automated actuation units, while the other leverages the uniform planting patterns of crops achieved through mechanical sowing to avoid crop interference while targeting weeds (<xref ref-type="bibr" rid="B46">Jiang et&#xa0;al., 2023a</xref>; <xref ref-type="bibr" rid="B59">LEMKEN, 2022</xref>; <xref ref-type="bibr" rid="B86">Quan et&#xa0;al., 2022</xref>). To understand the capabilities of intelligent mechanical weeding, it is essential to explore the key components and technologies, as it has been portrayed in <xref ref-type="fig" rid="f6"><bold>Figure&#xa0;6</bold></xref>.</p>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>Components of intelligent mechanical weeding. *Dino (Naio Technology).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g006.tif">
<alt-text content-type="machine-generated">Diagram of &#x201c;Intelligent Mechanical Weeding&#x201d; featuring four components: &#x201c;Sensors &amp; Detection Technologies&#x201d; with cameras and a weed detection image; &#x201c;Mechanical Weeding End Effectors&#x201d; showing a mechanical device in soil; &#x201c;Path Planning &amp; Navigation Systems&#x201d; displaying a colorful 3D mapping image; &#x201c;Robotic Platform Integration&#x201d; with a green robotic vehicle in a field.</alt-text>
</graphic></fig>
<p>This targeted approach minimizes soil disturbance, reduces labor intensity, and improves efficiency, addressing the limitations of traditional methods, such as poor intra-row weed control and potential crop damage. Intelligent mechanical weeding systems are designed to operate seamlessly in diverse agricultural settings which can adapt to varying crop types and field conditions. Their effectiveness is dependent on the advanced components and technologies that enable precise weed detection, identification, and removal while maintaining high operational efficiency. The following sections will delve into these critical elements in detail.</p>
<sec id="s3_2_1">
<label>3.2.1</label>
<title>Sensors and detection technologies</title>
<p>In intelligent mechanical weeding systems, sensors are pivotal for detecting weeds, enabling precise actuation of the end effector, and navigating environmental obstacles. These sensors facilitate efficient weed management by distinguishing between crops and weeds and ensuring minimal crop damage. The primary types of sensors used in these systems include ultrasonic sensors, optical sensors, laser sensors, imaging sensors, RGB/light sensors, multispectral sensors, hyperspectral sensors, RGB, and depth cameras. Each sensor type contributes uniquely to enhancing the performance of weeding robots, providing crucial data for decision-making and operational accuracy (<xref ref-type="bibr" rid="B112">Upadhyay et&#xa0;al., 2024b</xref>). <xref ref-type="fig" rid="f7"><bold>Figure&#xa0;7</bold></xref> illustrates commonly utilized sensors in such intelligent weeding systems.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>Sensors in use in the intelligent weed management system: <bold>(a)</bold> LiDAR sensor (<xref ref-type="bibr" rid="B26">Collins, 2022</xref>), <bold>(b)</bold> optical sensor (<xref ref-type="bibr" rid="B27">Croplands, 2021</xref>), <bold>(c)</bold> Ultrasonic sensor (<xref ref-type="bibr" rid="B100">Schneider Electric, 2022</xref>), <bold>(d)</bold> Specim AFX17 Hyperspectral Camera (Media, 2022), <bold>(e)</bold> Astra Series RGB depth camera (<xref ref-type="bibr" rid="B118">Vit and Shani, 2018</xref>), <bold>(f)</bold> MicaSense RedEdge Panchromatic multispectral camera (<xref ref-type="bibr" rid="B70">MicaSense, 2023</xref>).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g007.tif">
<alt-text content-type="machine-generated">(a) Velodyne Lidar sensor, (b) Optical sensor, (c) Ultrasonic sensor, (d) Specim AFX17 Hyperspectral Camera, (e) Astra Series RGB depth camera, (f) MicaSense RedEdge Panchromatic multispectral camera.</alt-text>
</graphic></fig>
<p>Ultrasonic sensors, which estimate distance by emitting high-frequency sound waves and measuring the time taken for the echo to return, have been effectively used in weed detection, as demonstrated by <xref ref-type="bibr" rid="B7">And&#xfa;jar et&#xa0;al. (2012)</xref>, who used vertically mounted ultrasonic sensors to differentiate crops and weeds based on height, achieving reliable detection across samples with varying weed densities. These sensors are light-independent, cost-effective, and adaptable to different environments; however, their performance degrades in wet conditions, and they struggle to differentiate complex plant structures, making them prone to errors in crop-weed distinction and requiring careful calibration (<xref ref-type="bibr" rid="B65">MaxBotix, 2019</xref>). Optical sensors detect weeds by analyzing the spectral characteristics of plants through the reflection and interruption of light (<xref ref-type="bibr" rid="B107">Suhail, 2022</xref>), as shown in the study by <xref ref-type="bibr" rid="B120">Wang et&#xa0;al. (2001)</xref>, where classification rates reached 100% for wheat and bare soil and 71.6% for weeds. These sensors are low-cost, fast, and simpler than machine vision systems, making them suitable for real-time weed identification, although their accuracy may vary due to environmental factors and plant spectral variability (<xref ref-type="bibr" rid="B122">Wang et&#xa0;al., 2007</xref>). LiDAR sensors operate by emitting laser pulses to create 3D representations of surroundings and are widely used for navigation and weed detection in intelligent weeding robots; for instance, <xref ref-type="bibr" rid="B64">Malavazi et&#xa0;al. (2018)</xref> used 2D point cloud-based line extraction to detect crops on the Oz weeding robot. While LiDAR offers high-precision mapping and autonomous navigation capabilities, it requires significant investment, skilled operation, and suffers from limited resolution and environmental sensitivity.</p>
<p>Hyperspectral (HS) and multispectral (MS) sensors, which analyze plant spectral signatures across many wavebands, are powerful tools for weed identification; HS captures narrow, detailed wavebands, while MS provides broader bands and simpler data handling. <xref ref-type="bibr" rid="B37">Graham Ram et&#xa0;al. (2023)</xref> utilized hyperspectral imaging with supervised ML models to detect Palmer amaranth weeds, achieving 93.95% accuracy and a 0.95 F1-score, showcasing the strong potential of HS imaging in intelligent weeding. Despite their strengths, HS sensors are complex, expensive, and highly sensitive to lighting, requiring significant data processing, while MS sensors offer a more cost-effective but less detailed alternative.</p>
<p>RGB/light sensors, including RGB cameras, thermal cameras, and depth cameras, are the most used sensors for crop and weed classification and identification. These sensors capture real-time RGB images from the field as shown in <xref ref-type="fig" rid="f8"><bold>Figure&#xa0;8</bold></xref>, enabling targeted weed control and improved crop management.</p>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>Workflow for weed identification based on RGB images.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g008.tif">
<alt-text content-type="machine-generated">Flowchart depicting a process involving image acquisition and analysis. Top left shows cameras labeled &#x201c;Image acquisition by camera/RGB sensor&#x201d; leading to a top right image of plants labeled &#x201c;Acquired RGB image dataset.&#x201d; From there, an arrow points to the bottom right, labeled &#x201c;DL architecture deployed on edge devices (Jetson devices),&#x201d; depicting a neural network and devices. An arrow then directs to the bottom left, showing an analyzed image with labeled bounding boxes and &#x201c;Final predictions with bounding boxes."</alt-text>
</graphic></fig>
<p>RGB sensors operate by responding to specific bandwidths within the visible spectrum, primarily the Blue (~450&#x2013;490 nm), Green (~520&#x2013;560 nm), and Red (~635&#x2013;700 nm) bands, to generate color images. Despite their widespread application, RGB sensors are limited to the visible spectrum and are highly sensitive to lighting conditions, which can impact their reliability (<xref ref-type="bibr" rid="B44">Jafarbiglu, 2023</xref>). These sensors are often integrated with machine learning (ML) and deep learning (DL) algorithms for weed detection and real-time classification. The general workflow involves acquiring RGB images at specific frame rates with high spatial resolution. These images are then processed through neural networks or detection algorithms, which generate predictions and real-time visualizations. Detection algorithms form the computational core of intelligent weeding systems, transforming raw sensor data into actionable insights. These algorithms typically employ image processing pipelines &#x2013; including segmentation, feature extraction and classification &#x2013; to differentiate crops from weeds (<xref ref-type="bibr" rid="B49">Jin et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B88">Rai et&#xa0;al., 2023</xref>). Classical methods use color indices (ExG, ExR), shape descriptors or texture analysis while modern approaches rely on ML and DL frameworks such as Support Vector Machines (SVM), Random Forests (RF), and Convolutional Neural Networks (CNNs). Advanced architectures like YOLO, ResNet and U-Net enable real-time object detection and semantic segmentation, providing precise spatial coordinates for targeted mechanical actuation (<xref ref-type="bibr" rid="B86">Quan et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B88">Rai et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B117">Visentin et&#xa0;al., 2023</xref>). 
Integrating these algorithms with sensor data is critical to achieving accurate, fast, and autonomous weed identification in dynamic field environments.</p>
<p>Based on these predictions, the weed removal unit is activated for precise weed eradication (<xref ref-type="bibr" rid="B57">Laftouty et&#xa0;al., 2023</xref>). Comparative analysis of multiple deep learning (DL) models including InceptionV3, AlexNet, VGG-16, YOLOv8, and ResNet-50, along with two custom CNN models, revealed that YOLOv8 achieved the highest performance with an accuracy of 100%, while ResNet-50 attained an accuracy of 99%. This study highlights the considerable potential of RGB sensors, particularly when integrated with advanced DL models, for the precise and efficient management of weeds in agricultural settings.</p>
<p>All these sensors play a vital role in weed detection and identification, each operating on distinct principles but sharing the common goal of precise, targeted weed removal. The system&#x2019;s effectiveness relies on seamlessly integrating these detection technologies with mechanical components that perform the actual weed eradication. The following section explores various mechanical weeding end effectors, the system&#x2019;s primary interface with the field. A thorough understanding of these end effectors is essential to optimize their compatibility with different detection systems and field conditions.</p>
</sec>
<sec id="s3_2_2">
<label>3.2.2</label>
<title>Mechanical weeding end effectors</title>
<p>End effectors in mechanical weeding systems play a pivotal role as the specialized tools mounted at the tip of the actuation unit, designed to interact with soil and effectively manipulate it to target and destroy weeds. These tools, which can be categorized as either passive or active, are essential for achieving precise and efficient weed control. Their performance and effectiveness are closely linked to the type of actuation unit employed, influencing the way the mechanical tools interact with weeds and soil (<xref ref-type="bibr" rid="B39">Haag, 2021</xref>; <xref ref-type="bibr" rid="B124">Xiang et&#xa0;al., 2024</xref>).</p>
<p>A comprehensive review of the literature reveals that several mechanisms and implementations have been developed and are in practice. Traditional designs, such as rotary and brush hoes, vertically mounted cultivators, tooth and tine harrows, sweeps, hoe blades, and torsion weeders, operate as passive tools for mechanical weeding (<xref ref-type="bibr" rid="B10">Balas et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B105">Stearns, 2021</xref>). These tools generally penetrate the soil to depths of 2&#x2013;4 cm and are primarily used for inter-row weed control. Recent advancements in end effector design emphasize scientific approaches to manipulating soil to uproot, bury, cut, or drill weeds effectively, as noted by <xref ref-type="bibr" rid="B23">Chicouene (2007)</xref>. These fundamental principles are critical for achieving efficient weed removal in real time.</p>
<p>Modern mechanical weeding technologies focus on addressing the challenge of intra-row weed control, which is inherently more complex than inter-row weeding. Intra-row weeds grow unpredictably and randomly among crops, requiring end effectors to operate with exceptional precision to avoid crop damage. In contrast, inter-row weeds, which grow between rows of crops, can be managed more effectively using conventional tools with adequate passes (<xref ref-type="bibr" rid="B84">Pradel et&#xa0;al., 2022</xref>). To handle the complexities of intra-row weeding, end effectors are being engineered with advanced designs that ensure precise targeting and accurate steering. Calibration and control are critical components in these systems to protect crops while removing weeds efficiently (<xref ref-type="bibr" rid="B91">Reiser et&#xa0;al., 2019</xref>). For a generalized understanding, end effectors for mechanical weeding can be broadly classified into three categories: passive, active, and hybrid. This classification encompasses the design principles of the tools, their modes of interaction with soil, and the processes involved in weed destruction. These designs reflect the advancements in the field and underscore the ongoing efforts to develop tools capable of addressing the growing demands of precision agriculture. <xref ref-type="fig" rid="f9"><bold>Figure&#xa0;9</bold></xref> illustrates this classification and provides an overview of the diverse end effector designs currently available for mechanical weeding.</p>
<fig id="f9" position="float">
<label>Figure&#xa0;9</label>
<caption>
<p>Types of end effectors available and the category (<xref ref-type="bibr" rid="B20">Chandel et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B58">Langsenkamp et&#xa0;al., 2014</xref>).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g009.tif">
<alt-text content-type="machine-generated">Flowchart illustrating types of mechanical weeding end-effectors. Central circle labeled &#x201c;Mechanical Weeding End-Effectors&#x201d; connects to three categories: &#x201c;Passive,&#x201d; &#x201c;Active,&#x201d; and &#x201c;Hybrid.&#x201d; &#x201c;Passive&#x201d; includes manual tools like garden rakes and tractor-drawn tools like torsion weeders. &#x201c;Active&#x201d; involves robotic weeders and drills. &#x201c;Hybrid&#x201d; integrates passive and active techniques, emphasizing inter and intra-row weed removal.</alt-text>
</graphic></fig>
<p><xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10</bold></xref> demonstrates various types of end effectors developed for mechanical weed removal, designed to address both inter-row and intra-row crop scenarios. These designs are tailored based on the size and growth stage of the weeds being targeted. Weeds in their early growth stages can be selectively eradicated using robotic end effector tools engineered for precision removal. Conversely, intra-row weeds that are distributed extensively across the soil surface often require continuous operation tools, such as rotary cultivators or weeding knives. These tools are designed to manipulate the entire soil area between crops consistently, ensuring comprehensive weed removal. The selection of an appropriate end effector is influenced by several critical factors, including the size and type of weeds, the prevailing soil conditions, and the required frequency of operation. This specificity ensures optimal performance of the weeding system and minimizes the risk of crop damage while achieving effective weed control (<xref ref-type="bibr" rid="B8">Asaf et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B128">Zawada et&#xa0;al., 2023</xref>).</p>
<fig id="f10" position="float">
<label>Figure&#xa0;10</label>
<caption>
<p>Different end effectors developed for mechanical weeding: <bold>(a)</bold> IIIR weeder (<xref ref-type="bibr" rid="B20">Chandel et&#xa0;al., 2021</xref>), <bold>(b)</bold> weeding knives (<xref ref-type="bibr" rid="B52">Kennedy et&#xa0;al., 2020</xref>), <bold>(c)</bold> rotary cultivator (<xref ref-type="bibr" rid="B109">Tillett et&#xa0;al., 2008</xref>), <bold>(d)</bold> tube stamp weeder (<xref ref-type="bibr" rid="B58">Langsenkamp et&#xa0;al., 2014</xref>), <bold>(e)</bold> Finger weeder (<xref ref-type="bibr" rid="B63">Machleb et&#xa0;al., 2021</xref>), <bold>(f)</bold> brush weeder (<xref ref-type="bibr" rid="B91">Reiser et&#xa0;al., 2019</xref>), <bold>(g)</bold> reciprocating elastic comb weeder (<xref ref-type="bibr" rid="B126">Ye et&#xa0;al., 2023</xref>), <bold>(h)</bold> drill end effector (<xref ref-type="bibr" rid="B119">Wang et&#xa0;al., 2024</xref>), <bold>(i)</bold> weeding brush (<xref ref-type="bibr" rid="B45">Jiang et&#xa0;al., 2023b</xref>), <bold>(j)</bold> mechanical stamp (<xref ref-type="bibr" rid="B123">Wu et&#xa0;al., 2020</xref>), <bold>(k)</bold> disc weeding knife (<xref ref-type="bibr" rid="B86">Quan et&#xa0;al., 2022</xref>), <bold>(l)</bold> robotic gripper weeder (<xref ref-type="bibr" rid="B117">Visentin et&#xa0;al., 2023</xref>).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g010.tif">
<alt-text content-type="machine-generated">Panel of various agricultural machinery and devices for automated farming, including mechanical weeders, robotic arms and sensor-equipped tools working in the field. Each image depicts different designs and mechanisms for smart mechanical weeding practices, showcasing technological advancements in agriculture.</alt-text>
</graphic></fig>
<p>For effective and intelligent mechanical weeding, integrating sensing and detection technologies with actuated removal units is crucial. These systems operate together, where sensing and detection technologies identify and distinguish weeds from crops, while the actuated removal units perform precise mechanical actions to eliminate the targeted weeds. The integration requires real-time synchronization between detection outputs and the end effector responses to ensure precise targeting and minimal crop disturbance. Building on the foundation of sensing and detection systems and the various types of end effectors discussed earlier, the next step in advancing precision weed management focuses on integrating these tools with robotic platforms. This integration not only enables autonomous operation but also ensures adaptability and efficient handling in different field conditions. The details of this transformative approach are examined in the following section.</p>
</sec>
<sec id="s3_2_3">
<label>3.2.3</label>
<title>Vision guided actuation and system integration</title>
<p>In conventional agricultural practices, mechanical weeding has traditionally been carried out either manually or by integrating conventional mechanized equipment with tractors. This method, while effective, heavily relies on the precise calibration of the equipment and the professional expertise of the operator (<xref ref-type="bibr" rid="B42">Hussain et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B62">Machleb et&#xa0;al., 2020</xref>). Consequently, their implementation is often labor-intensive and associated with high operational costs. To address these limitations, the adoption of autonomous methods for mechanical weeding has emerged as a promising and reliable alternative. These methods not only facilitate intelligent weed eradication but also significantly enhance operational efficiency (<xref ref-type="bibr" rid="B61">Liu et&#xa0;al., 2023</xref>). Autonomous weeding machines, often referred to as weeding robots, operate based on predefined rules and program logic. The primary objective of these systems is to accurately identify, classify, and localize weeds. Once detected, the integrated actuation unit removes the weeds autonomously, guided by the programmed algorithms. These systems represent a convergence of artificial intelligence (AI), robotics, and agricultural technology, forming a robust solution for precision weeding (<xref ref-type="bibr" rid="B14">Bernier, 2024</xref>). To establish a robotic platform dedicated to mechanical weeding, its components must be systematically organized to ensure efficient operation. <xref ref-type="fig" rid="f11"><bold>Figure&#xa0;11</bold></xref> provides a detailed illustration of the operational workflow of a ground robotic platform designed for mechanical weeding. For example, <xref ref-type="bibr" rid="B117">Visentin et&#xa0;al. 
(2023)</xref> developed a robotic platform capable of addressing both intra-row and inter-row weed removal, featuring operational and control architecture comprising key components. First, sensing and weed detection is achieved using an RGB sensor or camera that captures real-time field images, which are then preprocessed for weed detection. ML or DL-based algorithms analyze these images to determine the precise positions of weeds. Second, a programmable logic controller (PLC), which may include a standalone microcontroller, computer, or edge computing devices such as Jetson or Raspberry Pi, processes the detection data. These systems execute real-time computer vision algorithms and generate commands based on detection logic. Finally, the processed commands are transmitted to the mechanical actuation unit, responsible for the precise removal of weeds, operating in real-time to ensure accuracy and efficiency. This diagram encapsulates the systematic flow of data from sensing to actuation, highlighting the integration of advanced technologies for autonomous weed control.</p>
<fig id="f11" position="float">
<label>Figure&#xa0;11</label>
<caption>
<p>Schematic representation demonstrating the integration with the robotic platform for intelligent mechanical weeding systems.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g011.tif">
<alt-text content-type="machine-generated">Flowchart illustrating a robotic weeder system. The process begins with image acquisition for weed identification using deep learning, followed by position estimation in a ROS environment. Position data is processed by a Jetson computer with a PLC, which relays commands to a robotic arm for mechanical weeding. The system integrates sensor fusion with RTK-GPS, LiDAR, and cameras for navigation and communication, culminating in the operation of a robotic weeder.</alt-text>
</graphic></fig>
<p>This is a basic workflow that is followed by robotic weeders for real-time autonomous mechanical weeding. There are several weeding robots and mechanical weeding systems which have been developed so far, some of which are research-based, developed by some organizations, while others are commercially available, industry-grade. For instance, a work conducted by <xref ref-type="bibr" rid="B22">Chang et&#xa0;al. (2021)</xref> used a deep convolutional neural network for weed identification and localization, and a pyramid-shaped shovel-like end effector for effectively uprooting the weeds from the soil. The system has been tested by being operated at a speed of 20 cm/s and has proved to be effective with a 92.6% success rate in weed removal. In a study by <xref ref-type="bibr" rid="B102">Shanmugam and Asokan (2015)</xref>, a machine vision-based mechanical weeding system was developed for turmeric fields, utilizing a robotic arm controlled via a MATLAB platform. The system achieved a weed-plant separation accuracy of 94%, with the ExG-ExR method further enhancing separation accuracy to 98%.</p>
<p><xref ref-type="table" rid="T2"><bold>Table&#xa0;2</bold></xref> provides a comprehensive analysis of various advanced mechanical weed management systems that have been developed over the years.</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Descriptive analysis of the intelligent mechanical weed management systems.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Robot/weeding platform</th>
<th valign="top" align="left">Sensors/software</th>
<th valign="top" align="left">Weed identification method</th>
<th valign="top" align="left">Mechanical weeding mechanism</th>
<th valign="top" align="left">Crop</th>
<th valign="top" align="left">Performance</th>
<th valign="top" align="left">Reference</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Intra-row weeding system</td>
<td valign="top" align="left">Industrial camera</td>
<td valign="top" align="left">SPH-YOLOv5x model for crop detection</td>
<td valign="top" align="left">Real-time weed knife control system</td>
<td valign="top" align="left">Lettuce</td>
<td valign="top" align="left">Weed identification model accuracy of 95%, mAP value of 96% and weed removal accuracy of 80.25% at 2.38 km/h</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B46">Jiang et&#xa0;al. (2023a)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Intra-row weeder</td>
<td valign="top" align="left">Ultrasonic sensor</td>
<td valign="top" align="left">Ultrasonic sensor-based plant detection</td>
<td valign="top" align="left">Hydraulic actuated pinch roller weeding mechanism</td>
<td valign="top" align="left">Cabbage</td>
<td valign="top" align="left">Plant detection accuracy has R<sup>2</sup> value of 0.94, and the weed-specific accuracy of 33.9% for Southern crabgrass</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B97">Saber et&#xa0;al. (2015)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Odd Bot Maverick</td>
<td valign="top" align="left">High-resolution RGB camera and 3D depth camera</td>
<td valign="top" align="left">AI-driven vision-based weed detection system</td>
<td valign="top" align="left">Two delta arms with grippers for pressing and pulling out weeds</td>
<td valign="top" align="left">Carrots, Onions, &amp; Chicory</td>
<td valign="top" align="left">Precise weed removal with an accuracy of 2 mm and operational speed of 2 weeds/sec at 0.6 km/h</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B76">Odd.Bot (2024)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Smart weeding machine</td>
<td valign="top" align="left">Digital camera</td>
<td valign="top" align="left">YOLOv3-based weeds detection and localization</td>
<td valign="top" align="left">DC motor-driven claw rake weeding tool</td>
<td valign="top" align="left">N/A</td>
<td valign="top" align="left">Weed detection accuracy of 95.6% at 5 frames per second. At 15 cm/s weeding success rate of 92.6%</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B22">Chang et&#xa0;al. (2021)</xref></td>
</tr>
<tr>
<td valign="top" align="left">BoniRob</td>
<td valign="top" align="left">Camera for visual servoing</td>
<td valign="top" align="left">Human image processing and position transfer via mobile network</td>
<td valign="top" align="left">Linear actuated tube stamp</td>
<td valign="top" align="left">Maize</td>
<td valign="top" align="left">Weed control rate of 93.86% was attained</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B58">Langsenkamp et&#xa0;al. (2014)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Weed Spider Robotic Weeder</td>
<td valign="top" align="left">LiDAR, GNSS, camera</td>
<td valign="top" align="left">LiDAR-based weed mapping</td>
<td valign="top" align="left">Mechanical weeding arm with blades and automatic depth control</td>
<td valign="top" align="left">Tobacco, sweet potatoes, soybean</td>
<td valign="top" align="left">95% reduction in labor costs and works up to 3.5 acres per hour</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B38">GreenTech Robotics (2023)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Intelligent intra-row weeding robot</td>
<td valign="top" align="left">Industrial grade camera module (RER-USBFHD01M-LS36, Shenzhen, China)</td>
<td valign="top" align="left">YOLOv5 network selected as the vision system</td>
<td valign="top" align="left">Weeding brush fitted with wire brush and brush rollers with roller support</td>
<td valign="top" align="left">Maize, Chinese cabbage</td>
<td valign="top" align="left">Weed removal rate in maize and Chinese cabbage 90% and 94.5% respectively and crop damage of 1.9 and 0.8%</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B45">Jiang et&#xa0;al. (2023b)</xref></td>
</tr>
<tr>
<td valign="top" align="left">AgBotII</td>
<td valign="top" align="left">RGB and NIR camera (IDS UI1240SE 1.3MP global shutter camera)</td>
<td valign="top" align="left">Vision based online detection and classification based on color spaces</td>
<td valign="top" align="left">Robotic Blade hoe</td>
<td valign="top" align="left">Cotton, wild oats &amp; sow thistle</td>
<td valign="top" align="left">Weed classification accuracy was 96%. Highest performance in cotton (97.8%)</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B11">Bawden et&#xa0;al. (2017)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Rover</td>
<td valign="top" align="left">Intel RealSense D435i, RGB-D camera</td>
<td valign="top" align="left">CNN based on ResNet18, part of PlantNet</td>
<td valign="top" align="left">3D printed claw gripper on a gantry robot</td>
<td valign="top" align="left">Lettuce</td>
<td valign="top" align="left">Crop and weed detection accuracy above 97% and effective weed removal ~85% with crop damage less than 5%</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B117">Visentin et&#xa0;al. (2023)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Universal mobile robot</td>
<td valign="top" align="left">Industrial USB digital camera (6-DZM-12, PHZL Co., Ltd., Shenzhen City, China)</td>
<td valign="top" align="left">YOLOv3 network for real-time weed detection</td>
<td valign="top" align="left">Disc weeding knives (blade, wedge, and plough) with arbours</td>
<td valign="top" align="left">Maize</td>
<td valign="top" align="left">Weed removal rate of 85.91% and crop injury of 1.17% and YOLOv3 detection accuracies for maize and weeds were 98.5% and 90.9% respectively</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B86">Quan et&#xa0;al. (2022)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Phoenix</td>
<td valign="top" align="left">2D laser scanner, Electromechanical sensor and sonar sensor</td>
<td valign="top" align="left">Trunk position detection based on the sonar</td>
<td valign="top" align="left">Electric rotary weeder</td>
<td valign="top" align="left">Vineyard</td>
<td valign="top" align="left">Average tilled area was 65% for feeler and 82% for sonar</td>
<td valign="top" align="left">Reiser et&#xa0;al. (2019)</td>
</tr>
<tr>
<td valign="top" align="left">Tertill</td>
<td valign="top" align="left">Capacitive sensors</td>
<td valign="top" align="left">Capacitive sensors detect tall plants and avoid obstacles</td>
<td valign="top" align="left">Four camber wheels (grousers), weed whacker</td>
<td valign="top" align="left">Pearl millet</td>
<td valign="top" align="left">Efficiency ranged from 54-75%</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B99">Sanchez and Gallandt (2021)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Weeding robot</td>
<td valign="top" align="left">Laser ranging sensors (BL-200NMZ)</td>
<td valign="top" align="left">Laser ranging sensor-based plant position estimation</td>
<td valign="top" align="left">Reciprocating elastic comb</td>
<td valign="top" align="left">Soybean</td>
<td valign="top" align="left">Weeding rate of 98.2% and crop injury of 1.69% at optimal speed of 0.31 m/s at 29.06 mm depth</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B126">Ye et&#xa0;al. (2023)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Modified BoniRob</td>
<td valign="top" align="left">Global shutter camera (JAI AD&#x2010;130 GE) and 8 mm lens (Fujinon TF15&#x2010;DA&#x2010;8), narrow&#x2010;beam sonars (SRF235 Ultrasonic Range Finder)</td>
<td valign="top" align="left">Na&#xef;ve Bayes filtering, intra and inter camera visual tracking</td>
<td valign="top" align="left">18 stamping tools composed of pneumatic cylinder</td>
<td valign="top" align="left">Sugar beet</td>
<td valign="top" align="left">Operated at 0.05 m/s on flat and rough terrain efficiency was 99.11% and 99.17% respectively</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B123">Wu et&#xa0;al. (2020)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Robovator</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">Image processing-based binary segmentation</td>
<td valign="top" align="left">Torsion weeder with square tines</td>
<td valign="top" align="left">White cabbage</td>
<td valign="top" align="left">Intelligent weeding can remove weeds closer to crops without subsequent manual weeding</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B69">Melander et&#xa0;al. (2015)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Mechanical weeder</td>
<td valign="top" align="left">Red-infrared camera,</td>
<td valign="top" align="left">Machine vision system with the software IMPASS</td>
<td valign="top" align="left">Motorized finger weeder</td>
<td valign="top" align="left">Sugar beet</td>
<td valign="top" align="left">Intra row weed control efficacy ranged 87 to 91% in 2017 and 91 to 94% in 2018</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B63">Machleb et&#xa0;al. (2021)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Tractor- intra row weeding platform</td>
<td valign="top" align="left">color camera (Do3think CM036) and lens (AZURE-0420mm)</td>
<td valign="top" align="left">Image processing algorithm consisting of thresholding, refinement and filtering</td>
<td valign="top" align="left">C-type vertical axis weeding blade</td>
<td valign="top" align="left">Cauliflower, lettuce and maize</td>
<td valign="top" align="left">Identification rates of crops were above 95% and operating speed of 2 km/hr improves efficiency 34.4 times that of manual labor</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B73">Nan et&#xa0;al. (2015)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Camera guided hoeing system</td>
<td valign="top" align="left">Two contact sensors, 3 m wide camera-guided hoeing system (K.U.L.T.-Kress Umweltschonende Landtechnik, K&#xfc;rnbach, Germany)</td>
<td valign="top" align="left">RGB camera for crop row scan and analysis and hoe alignment adjustment with contact sensors</td>
<td valign="top" align="left">Hoeing implement (sweep) with contact disc</td>
<td valign="top" align="left">Maize</td>
<td valign="top" align="left">Demonstrates effective slope force compensation on various gradients, enabling precise hoeing on sloping terrains</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B104">Spaeth et&#xa0;al. (2024)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Tractor with hoe Chopstar</td>
<td valign="top" align="left">2D RGB camera</td>
<td valign="top" align="left">Colour and height-based camera setting adjustment</td>
<td valign="top" align="left">Row hoeing with chopstar, post emergent hoeing goosefeet sweeps</td>
<td valign="top" align="left">Sugar beet</td>
<td valign="top" align="left">Interrow weed control efficiency was between 94-98% for the goose feet sweeps</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B80">Parasca et&#xa0;al. (2024)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Tractor- camera steered mechanical weeder</td>
<td valign="top" align="left">OEM Claas 3-D stereo camera</td>
<td valign="top" align="left">3D camera and artificial lighting-based row detection for weeding hoe automatic steering, crop identification based on size</td>
<td valign="top" align="left">Duck foot blades for inter row with four different intra row weeders- flexible finger weeder, torsion weeder, rotary harrow or ridging blades</td>
<td valign="top" align="left">Maize, Soybean, Sugar beet</td>
<td valign="top" align="left">Camera steered hoeing had an efficiency of 78% and yield increase in white sugar, maize and soybean by 39, 43 and 58% respectively.</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B56">Kunz et&#xa0;al. (2018)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Tractor- rotating vertical tines</td>
<td valign="top" align="left">RGB-D sensor (Kinect version 2), photonic sensor for depth estimation</td>
<td valign="top" align="left">Computer vision and image processing including feature extraction</td>
<td valign="top" align="left">Spinning tines mounted on pivoting arms with servo motors</td>
<td valign="top" align="left">Broccoli &amp; lettuce</td>
<td valign="top" align="left">Segmentation accuracy was in the range of 87.2 to 96.6% for broccoli and 74.12% to 92.4% for lettuce.</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B33">Gai et&#xa0;al. (2020)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Robotic weeder</td>
<td valign="top" align="left">Digital color camera (Model piA240012gc) with fixed focal length lens (M0814-MP2&#x2013;8 mm)</td>
<td valign="top" align="left">Geometric appearance-based crop detection</td>
<td valign="top" align="left">Robotic weeding knife</td>
<td valign="top" align="left">Tomato</td>
<td valign="top" align="left">Tomato stems detected with 99.19% accuracy</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B89">Raja et&#xa0;al. (2020)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Self-propelled inter row weeder</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">Visual recognition system for path planning</td>
<td valign="top" align="left">Weeding wheels with rake teeth</td>
<td valign="top" align="left">Paddy</td>
<td valign="top" align="left">At a forward speed of 0.64 m/s, weed rate prediction accuracy was 88.43%</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B108">Tang et&#xa0;al. (2021)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Tractor with harrow</td>
<td valign="top" align="left">5 MP RGB camera and 25 mm lens, RTK-GNSS receiver</td>
<td valign="top" align="left">AI algorithm and DL-based weed/crop cover detection</td>
<td valign="top" align="left">Weeding harrow tines</td>
<td valign="top" align="left">Barley</td>
<td valign="top" align="left">R<sup>2</sup> prediction value of 95.9% for weed cover and 98.6% for crop cover in pre-harrow images and 88.4 and 97.7% for post harrow images</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B12">Berge et&#xa0;al. (2024)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Classification based robotic weed control</td>
<td valign="top" align="left">industrial camera (HF868-2)</td>
<td valign="top" align="left">LettWd-YOLOv8l model for object detection</td>
<td valign="top" align="left">Weeding knives</td>
<td valign="top" align="left">Lettuce</td>
<td valign="top" align="left">Achieved 99.73% precision and 99.5% F1-score on indoor dataset under varied lighting, intra-row weeding rate of 83.7% at 3.28 <inline-formula>
<mml:math display="inline" id="im2"><mml:mrow><mml:mi>k</mml:mi><mml:mi>m</mml:mi><mml:mo stretchy="false">/</mml:mo><mml:mi>h</mml:mi></mml:mrow></mml:math></inline-formula></td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B129">Zhao et&#xa0;al. (2024)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Precision mechanical weeder</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">Deep learning-based weed detection and signal-based weeder actuation</td>
<td valign="top" align="left">Spiral bar type weeding head</td>
<td valign="top" align="left">Corn</td>
<td valign="top" align="left">95% weed removal rate and 3% crop damage at a movement speed of 80 mm/s</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B41">Hu et&#xa0;al. (2024)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Pneumatic precision seeder Optima V</td>
<td valign="top" align="left">Camera (KULT iVision PV)</td>
<td valign="top" align="left">Vision based<break/>Row detection</td>
<td valign="top" align="left">Cutting discs, no till sweeps,</td>
<td valign="top" align="left">Maize</td>
<td valign="top" align="left">Bidirectional hoeing increased efficiency from 80% to 95%</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B74">Naruhn et&#xa0;al. (2023)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Intra-row weeder</td>
<td valign="top" align="left">Ultrasonic sensor (28015 PING), proximity sensors (18&#x2013;14 DP2)</td>
<td valign="top" align="left">Crop weed sensing with position sensor</td>
<td valign="top" align="left">Vertical axis rotary shaft with weeding blade</td>
<td valign="top" align="left">Green chili &amp; tomato</td>
<td valign="top" align="left">Effective weed control efficiency above 65% and crop damage less than 25%</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B54">Kumar et&#xa0;al. (2020)</xref></td>
</tr>
<tr>
<td valign="top" align="left">FarmDroid FD20</td>
<td valign="top" align="left">RTK-GPS</td>
<td valign="top" align="left">Early-stage weeding based on high precision GPS technology</td>
<td valign="top" align="left">6 hoeing tools</td>
<td valign="top" align="left">Sugar beet</td>
<td valign="top" align="left">Increase in sugar beet yields from 40-60%</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B94">Rossmadl et&#xa0;al. (2023)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Mechatronic Intra-row weeding system</td>
<td valign="top" align="left">CMUcam5 Pixy camera</td>
<td valign="top" align="left">Vision based crop detection and localization</td>
<td valign="top" align="left">Crescent shaped blade</td>
<td valign="top" align="left">Corn</td>
<td valign="top" align="left">Demonstrates effective crop recognition and weeding under controlled conditions</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B13">Berkmortel et&#xa0;al. (2021)</xref></td>
</tr>
<tr>
<td valign="top" align="left">BonnBot-I</td>
<td valign="top" align="left">Camera (Intel RealSense D455), Inertial Navigation System</td>
<td valign="top" align="left">Instance segmentation based on Mask-RCNN</td>
<td valign="top" align="left">Mechanical hoeing</td>
<td valign="top" align="left">Corn</td>
<td valign="top" align="left">The Normalized Absolute Error reduced from 8.3% to 3.5% for the weeding platform</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B5">Ahmadi et&#xa0;al. (2022)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Weed hoeing</td>
<td valign="top" align="left">RGB camera</td>
<td valign="top" align="left">Image segmentation. Background separation by Kalman filter</td>
<td valign="top" align="left">No-till sweeps</td>
<td valign="top" align="left">Winter wheat</td>
<td valign="top" align="left">Camera guided weeding efficiency 72-96% for inter-row and 21-91% for intra-row</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B36">Gerhards et&#xa0;al. (2020)</xref></td>
</tr>
<tr>
<td valign="top" align="left">Agrobot</td>
<td valign="top" align="left">HP webcam (W100HP) &#x2013; 1280 <inline-formula>
<mml:math display="inline" id="im3"><mml:mo>&#xd7;</mml:mo></mml:math></inline-formula>720 resolution</td>
<td valign="top" align="left">Image processing and DL-based weed identification; MobileNetV2 and SSD FPN-Lite</td>
<td valign="top" align="left">15 cm wide shaped blades (inter row); 15 cm flat sharp blades (intra row)</td>
<td valign="top" align="left">N/A</td>
<td valign="top" align="left">Weed detection model accuracy of 99%</td>
<td valign="top" align="left"><xref ref-type="bibr" rid="B51">Jog and Agashe (2024)</xref></td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Based on these research studies, it has been observed that most of the works have focused on accurately detecting the weeds in real-time field conditions and using custom end effectors for weed removal. The idea of integrating these detection models with the Program Logic Controller or edge devices for accurate robot actuation is a challenging task. The main concern lies in getting the exact position of the weed from the detection models and synchronizing them with the inverse kinematics or the actuation unit of the robotic system for real-time weeding.</p>
<p>In the context of weeding, there are two cases based on which the robotic system removes the weeds. Depending on the degrees of freedom of the automated unit, the conceptualization of the actuation logic is determined. Firstly, the robotic weeder with an actuation unit with a single degree of freedom has a simple operational logic for removing the intra-row weeds based on detecting the crops under consideration at the time of operation. <xref ref-type="fig" rid="f12"><bold>Figure&#xa0;12</bold></xref> shows examples of the operation of the automated vision-based weeding unit that does not require localization to eradicate weeds; rather, it detects the presence of crops and tills the intermediate soil area. A camera mounted near the actuation unit detects the presence of crops, and in turn, the actuation unit operates to remove the weeds in no-crop regions and vice versa. This type of actuation is effective if the position of the weed is uniform; otherwise, depending on the design and the structure of the contact tool, there are chances of large amounts of unnecessary soil manipulation. Secondly, another approach to automated and robotic weeding involves the use of a robotic arm for the purpose. The concept of weed detection and localization for the actuation of robotic arms is the same for all the types of robots that are available. <xref ref-type="fig" rid="f13"><bold>Figure&#xa0;13</bold></xref> shows an experimental setup with a robotic manipulator arm and a camera mounted over the target object for real-time coordinate estimation and actuation.</p>
<fig id="f12" position="float">
<label>Figure&#xa0;12</label>
<caption>
<p>Vision-based weed removal without object localization <bold>(a)</bold> Intra-row weeding platform using camera and rotating disc weeding knives (<xref ref-type="bibr" rid="B86">Quan et&#xa0;al., 2022</xref>), <bold>(b)</bold> experimental setup on artificial soil bin with weeding knives control system. (<xref ref-type="bibr" rid="B46">Jiang et&#xa0;al., 2023a</xref>).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g012.tif">
<alt-text content-type="machine-generated">Panel a illustrates a diagram of a weeding robot platform moving along crop rows with labeled zones for camera view, weeding work area, and no-till zone. Panel b shows an actual robotic system in a lab setting, featuring a structure above a plant on soil, designed for automated weeding.</alt-text>
</graphic></fig>
<fig id="f13" position="float">
<label>Figure&#xa0;13</label>
<caption>
<p>Object localization for coordinate estimation and robot actuation.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g013.tif">
<alt-text content-type="machine-generated">Diagram of a robotic setup featuring a manipulator arm mounted on a frame with an attached camera. The camera is focused on a study object, a plant, positioned below. An inset highlights the object's bounding box and center, denoted by coordinates \( X^{cam}_{obj}, Y^{cam}_{obj} \). Coordinate axes for world, robot base, and plant are shown.</alt-text>
</graphic></fig>
<p>The main complexity in using a robotic arm for weeding lies in estimating the position of the object in real time with respect to the robot base. In practice, detections arrive in the camera frame. For inverse kinematics (IK), however, the target must be expressed in the robot base frame. Suppose the detected bounding-box center is <inline-formula>
<mml:math display="inline" id="im4"><mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msubsup><mml:mo>,</mml:mo><mml:msubsup><mml:mi>Y</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msubsup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow></mml:math></inline-formula>(and depth <inline-formula>
<mml:math display="inline" id="im5"><mml:mrow><mml:msubsup><mml:mi>Z</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msubsup></mml:mrow></mml:math></inline-formula> when available), the homogenous point in the camera frame is represented as</p>
<disp-formula id="eq1"><label>(1)</label>
<mml:math display="block" id="M1"><mml:mrow><mml:mmultiscripts><mml:mi>P</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:mmultiscripts><mml:mo>=</mml:mo><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable><mml:mtr><mml:mtd><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msubsup></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:msubsup><mml:mi>Y</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msubsup></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:msubsup><mml:mi>Z</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msubsup></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:mrow></mml:math>
</disp-formula>
<p>To express the object in the base frame, pre-multiply by the homogeneous transform of the camera with respect to the base (<xref ref-type="bibr" rid="B19">Cao et&#xa0;al., 2019</xref>).</p>
<disp-formula id="eq2"><label>(2)</label>
<mml:math display="block" id="M2"><mml:mrow><mml:mmultiscripts><mml:mi>P</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mi>a</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mmultiscripts><mml:mo>=</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mmultiscripts><mml:mi>T</mml:mi><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mi>a</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mmultiscripts><mml:mo>&#xa0;</mml:mo><mml:mo>.</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mmultiscripts><mml:mi>P</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:mmultiscripts></mml:mrow></mml:math>
</disp-formula>
<p>Here <inline-formula>
<mml:math display="inline" id="im6"><mml:mrow><mml:mmultiscripts><mml:mi>P</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mi>a</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mmultiscripts></mml:mrow></mml:math></inline-formula> refers to the position of the object with respect to the base of the robot and <inline-formula>
<mml:math display="inline" id="im7"><mml:mrow><mml:mmultiscripts><mml:mi>T</mml:mi><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mi>a</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mmultiscripts></mml:mrow></mml:math></inline-formula> represents the homogeneous transformation matrix (<xref ref-type="bibr" rid="B17">Briot et&#xa0;al., 2015</xref>) for frame transformation from the camera frame to the base frame. The transform has the standard block form</p>
<disp-formula id="eq3"><label>(3)</label>
<mml:math display="block" id="M3"><mml:mrow><mml:mmultiscripts><mml:mi>T</mml:mi><mml:mrow><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mi>a</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mmultiscripts><mml:mo>=</mml:mo><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable><mml:mtr><mml:mtd><mml:mi>R</mml:mi></mml:mtd><mml:mtd><mml:mi>t</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:msub><mml:mn>0</mml:mn><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#xd7;</mml:mo><mml:mn>3</mml:mn></mml:mrow></mml:msub></mml:mrow></mml:mtd><mml:mtd><mml:mn>1</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mi>R</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi>S</mml:mi><mml:mi>O</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mn>3</mml:mn><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mi>t</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mo>&#xa0;</mml:mo><mml:msup><mml:mi>&#x211d;</mml:mi><mml:mn>3</mml:mn></mml:msup><mml:mo>&#xa0;</mml:mo><mml:mo>&#xa0;</mml:mo></mml:mrow></mml:math>
</disp-formula>
<p><xref ref-type="disp-formula" rid="eq1">Equations 1</xref>&#x2013;<xref ref-type="disp-formula" rid="eq3">3</xref> define the full transformation pipeline that maps detections from the camera frame into the robot base frame, enabling IK-based actuation, where R and t are the camera&#x2019;s orientation and position expressed in the base frame (obtained from hand-eye calibration or kinematic calibration). After applying (2), the arm can attempt IK to reach <inline-formula>
<mml:math display="inline" id="im8"><mml:mrow><mml:mmultiscripts><mml:mi>P</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mi>a</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mmultiscripts></mml:mrow></mml:math></inline-formula> if the point lies within the reachable workspace. This pipeline exemplifies the integration of perception and manipulation for precise, sustainable, and efficient weed management.</p>
<p>As illustrated in <xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10</bold></xref>, the overall performance of an autonomous weeding system is substantially affected by its locomotion capabilities. Consequently, the following section explores the path planning and navigation systems that are essential for effective robotic operation in agricultural fields.</p>
</sec>
<sec id="s3_2_4">
<label>3.2.4</label>
<title>Path planning and navigation systems</title>
<p>For autonomous and precise mechanical weeding in real-time field conditions, robots must possess precise positioning capabilities and the ability to autonomously navigate fields intelligently. Robot navigation relies on the ability to continuously determine its real-time position and orientation, enabling path planning and collision avoidance (<xref ref-type="bibr" rid="B101">Shalal et&#xa0;al., 2013</xref>). Various navigation systems have been developed, integrating sensor-based technologies, computational methods, and control strategies as shown in <xref ref-type="fig" rid="f14"><bold>Figure&#xa0;14</bold></xref>. Accurate navigation is fundamental to operational efficiency, as it requires continuous positional tracking and dynamic adjustments in movement to effectively address the challenges inherent in unstructured agricultural environments.</p>
<fig id="f14" position="float">
<label>Figure&#xa0;14</label>
<caption>
<p>Multiple sensor-based robotic platforms: <bold>(a)</bold> Rover platform (<xref ref-type="bibr" rid="B83">Post et&#xa0;al., 2017</xref>), <bold>(b)</bold> BoniRob farming UGV (<xref ref-type="bibr" rid="B85">Pretto et&#xa0;al., 2021</xref>), <bold>(c)</bold> Husky Observer (<xref ref-type="bibr" rid="B24">Clearpath Robotics, 2023</xref>).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1734507-g014.tif">
<alt-text content-type="machine-generated">a) A small autonomous rover with components labeled: stereo vision camera, scanning laser rangefinder, ultrasonic collision sensors, monitor display, communication antenna, GNSS and inertial sensors, onboard computer, and dual hub motors.  b) A large robotic vehicle equipped with dual velodyne laser scanners, RGB camera, GPS, arms with encoders, and a weed intervention module, designed for agricultural applications.  c) A yellow robotic vehicle navigating through a grassy field, featuring various sensors and antennas atop, highlighting its autonomous navigation capabilities.</alt-text>
</graphic></fig>
<p>Agricultural navigation systems face several challenges, including unstructured terrains, environmental noise, and hardware limitations such as wheel slippage and end effector failures (<xref ref-type="bibr" rid="B125">Yao et&#xa0;al., 2024</xref>). GPS provides precise ground vehicle location data through satellite-based positioning, as demonstrated in a study by <xref ref-type="bibr" rid="B72">Mwitta and Rains (2024)</xref>, who integrated GPS with visual navigation for cotton field operations. Their system achieved a lateral deviation of 4.8 cm from the desired path, showcasing the potential of GPS-augmented solutions. However, GPS sensors face challenges in closed environments like indoor conditions, where satellite signal obstruction and weather-induced noise affect performance. The inclusion of vision sensors has been shown to enhance system accuracy under such conditions (<xref ref-type="bibr" rid="B15">Binbin et&#xa0;al., 2021</xref>). LiDAR (Light Detection and Ranging) is another key sensor used in agricultural robot navigation, leveraging laser pulses to measure distances and mapping the environment in real-time. LiDAR sensors facilitate obstacle detection and avoidance, as demonstrated by <xref ref-type="bibr" rid="B1">Aarab (2023)</xref>. For example, <xref ref-type="bibr" rid="B2">Abanay et&#xa0;al. (2022)</xref> developed a navigation system for the AgriEco Robot using a 2D LiDAR sensor integrated with ROS, achieving a lateral error within a few centimeters and an RMS error of 2.99 cm at a navigation speed of 0.44 m/s. Similarly, <xref ref-type="bibr" rid="B64">Malavazi et&#xa0;al. (2018)</xref> utilized LiDAR-based line extraction from two-dimensional point clouds, utilizing the PEARL method and successfully applying the algorithm to the Oz weeding robot.</p>
<p>Relying on a single sensor for navigation can result in reduced accuracy and increased path deviations. Sensor fusion, which combines data from multiple sensors, such as LiDAR, cameras, ultrasonic sensors, and RADAR, addresses these limitations and improves navigation performance (<xref ref-type="bibr" rid="B93">Rigoulet, 2021</xref>). For instance, <xref ref-type="bibr" rid="B116">Velasquez et&#xa0;al. (2022)</xref> developed an autonomous navigation system integrating LiDAR for crop row detection and IMU for enhanced performance.</p>
<p>The system underwent testing over 50.88 km in various field conditions, achieving average distances between interventions of 386.9 m in gap-free fields, 56.1 m in production fields, and 47.5 m in fields with 1 m gaps, demonstrating robust performance in diverse agricultural environments. <xref ref-type="bibr" rid="B112">Upadhyay et&#xa0;al. (2024b)</xref> have provided a comprehensive review of this navigation approach, offering valuable insights into the design and implementation of advanced agricultural robotics.</p>
</sec>
</sec>
</sec>
<sec id="s4">
<label>4</label>
<title>Technical challenges and future directions</title>
<p>The primary objective of a precise mechanical weeding system is to effectively eliminate and manage weeds while minimizing environmental impact, reducing crop injury, and ultimately enhancing net yield. These goals form the foundation for designing and developing an efficient weed management system. Moreover, economic feasibility and platform versatility are crucial factors that influence the adoption of these systems by farmers and stakeholders. These elements play a vital role in motivating users to invest in advanced weed management technologies. Based on an extensive literature review and analysis of documented research examples, it is clear that significant progress is still needed to optimize the performance of such systems to fully realize their potential.</p>
<p>Currently, most commercially available mechanical weeding systems are tractor-mounted implements designed for traditional weed management methods. However, the adoption of intelligent and autonomous mechanical weeding technologies remains in its early stages. Farmers and end-users are gradually becoming familiar with these advanced systems. Despite significant progress in autonomous agricultural systems, including deep learning and machine vision-based weed detection, several limitations and challenges persist.</p>
<p>Technical challenges in autonomous mechanical weeding systems include precise identification and localization of weeds in real-time scenarios. Although cutting-edge deep learning models, such as the latest variants of the YOLO (You Only Look Once) framework, have shown excellent performance in weed detection and classification, several challenges remain. These challenges encompass issues related to dataset quality and requirements, variations in lighting conditions, morphological similarities between crops and weeds, overlapping images in datasets, and the dynamic nature of environmental conditions. Additionally, the different growth stages of weeds further complicate detection tasks. Beyond these data-related challenges, computational costs, hardware limitations, and potential misidentifications or incorrect detections present significant hurdles. For instance, inadequate datasets or insufficient model training can lead to performance degradation in real-world scenarios, particularly under varied environmental conditions (<xref ref-type="bibr" rid="B87">Rai and Sun, 2024</xref>; <xref ref-type="bibr" rid="B98">Saiwa, 2024</xref>; <xref ref-type="bibr" rid="B121">Wang et&#xa0;al., 2019</xref>).</p>
<p>To address these limitations, deep learning models should be trained on larger and more diverse datasets enhanced through augmentation techniques, such as brightness adjustments, noise addition, and rotation, to simulate real-world variations. Furthermore, the model architecture implemented on edge devices must prioritize lightweight designs to reduce computational complexity while preserving accuracy. Striking a balance between model performance and processing speed is essential in selecting the optimal model for real-time weed detection tasks. By ensuring this trade-off, the models can achieve higher accuracy and efficiency in field operations, ultimately leading to more effective and reliable weed management solutions.</p>
<p>Another major challenge in autonomous mechanical weeding systems is the potential damage to crops caused by the physical interaction of mechanical tools with the soil during weeding operations. This issue is critically important, serving as a key selection criterion for any mechanical weeding system. While the primary aim of these systems is to eliminate weeds, unintended harm to crops can reduce yield, thereby undermining the system&#x2019;s effectiveness. Although inter-row weeders generally operate between crop rows and pose minimal risk to crops, the concern increases with intra-row weeders, which operate in closer proximity to the soil and crops.</p>
<p>The weeding end effectors analyzed in this review exhibit a diverse array of operational methodologies, each characterized by distinct levels of efficiency and an associated percentage of crop injury. At present, crop injury and inadvertent interactions, arising from misidentifications, variations in weed morphology, and diverse soil types, continue to pose challenges that have not yet been thoroughly addressed. The design of end effectors capable of precise targeting with minimal impact on crops represents an area of ongoing scholarly inquiry. Approaches designed to tackle these challenges encompass the advancement of high-resolution weed mapping techniques, facilitating the creation of an accurate real-time distribution of both weeds and crops, thereby clearly delineating the operational territory for the end effector. Furthermore, the designs of end-effectors ought to be adaptable to various weed growth stages and soil conditions.</p>
<p>To tackle the challenges related to autonomous mechanical weeding systems, the manufacturing and research sectors must follow a comprehensive set of guidelines and standards. These guidelines should act as a benchmark for creating effective and broadly acceptable systems capable of achieving precise weed eradication and sustainable management. The following points outline the essential requirements for the proper functioning and operation of weed management systems:</p>
<list list-type="simple">
<list-item>
<p>a. A compact, efficient, and lightweight detection model should be chosen for the task of weed identification and localization. These models must undergo rigorous testing in both controlled and field environments before deployment on edge devices to ensure reliable performance in real-world conditions.</p></list-item>
<list-item>
<p>b. The weeding tools must be specific to crops or adjustable, allowing them to accommodate environmental variability, including changes in weed morphology, soil types, and topographical conditions. This adaptability ensures the system&#x2019;s effectiveness across various agricultural settings and crop types.</p></list-item>
<list-item>
<p>c. The developed system must have a robust and adjustable design, regardless of geographical location or crop type. It should allow for flexible operation, functioning either as an independent unit or as a dependent unit mounted on a tractor. For example, mechanical weeding systems should be capable of operating autonomously as robotic platforms or integrated with tractors for simultaneous weeding and other intercultural operations (<xref ref-type="bibr" rid="B38">GreenTech Robotics, 2023</xref>).</p></list-item>
<list-item>
<p>d. The components&#x2014;such as sensors and computing devices&#x2014;that make up the robotic mechanical weeding system should be selected and integrated to balance high performance with economic feasibility. Additionally, the system&#x2019;s control logic must include a self-reset feature to recalibrate the platform to its default settings in case of operational disruptions during fieldwork.</p></list-item>
<list-item>
<p>e. The system should incorporate an intuitive and user-friendly interface, enabling farmers and operators to easily understand the system&#x2019;s operations. The interface should also assist users in diagnosing and resolving issues that may arise during field operations in real time.</p></list-item>
<list-item>
<p>f. To improve the navigation accuracy of the robotic platform, sensor fusion should be utilized. This means integrating multiple sensors, including LiDAR, ultrasonic sensors, RTK-GPS, and RGB cameras, to offset the limitations of individual sensors. The combination of these technologies guarantees precise localization and reliable performance across varying environmental conditions.</p></list-item>
</list>
<p>While mechanical weeding serves as a reliable option for effective weed management, an integrated method that combines mechanical systems with advanced techniques such as laser-based weed removal and &#x201c;see-and-spray&#x201d; technologies can enhance performance even further. This hybrid strategy provides intelligent application capabilities while emphasizing environmental safety.</p>
<p>Robotic systems that meet these criteria and follow the guidelines should be prioritized for commercialization and large-scale implementation. These guidelines not only provide a roadmap for developing autonomous and sustainable mechanical weed management systems but also highlight future research directions aimed at tackling the global challenge of weed management. Advanced mechanical weeding approaches should focus on net yield as a critical performance indicator, optimizing system design to maximize crop yields for specific varieties. By aligning system performance with yield outcomes, mechanical weeding systems can become a transformative solution for precision agriculture.</p>
</sec>
<sec id="s5" sec-type="conclusions">
<label>5</label>
<title>Conclusion</title>
<p>The ongoing research and advancements in the integration of Artificial Intelligence, machine vision and robotics into mechanical weed control systems have advanced significantly, positioning intelligent mechanical weeding systems as an important component of the precision agriculture sector. This review examines a broad body of literature synthesizing insights on mechanical weeding methods, sensing and detection technologies, integrated robotic platforms and actuation strategies that define the state of the art in autonomous weed management. The evidence shows that intelligent mechanical weeders leveraging high resolution RGB cameras, sensors and DL models such as YOLO and ResNet, are steadily improving in their capacity to identify and localize weeds under real field conditions. These systems, paired with adaptive end-effectors provide an increasingly reliable means of controlling inter and intra-row weeds with greater precision compared to traditional approaches.</p>
<p>Over the past two decades, intelligent mechanical weeding systems have progressed from conceptual prototypes to increasingly functional field-ready platforms, yet their widespread adoption remains constrained by persistent challenges. Accurate weed&#x2013;crop differentiation under diverse field conditions is still difficult, as lighting variability, morphological similarities, and occlusion often reduce classification accuracy and compromise real-time actuation. Synchronizing detection models with mechanical tool actuation in real time also requires high computational resources, creating trade-offs between speed and accuracy when deployed on embedded or edge devices. Crop injury risks, inconsistent performance across soil types and terrains, and high system costs further limit confidence among farmers and stakeholders. These obstacles highlight that the next generation of intelligent mechanical weeding systems must be not only technologically advanced but also economically viable, scalable, and easy to use.</p>
<p>Promising research directions are emerging to address these gaps. Lightweight and efficient deep learning algorithms optimized for edge computing platforms will be essential to achieve accurate and real-time detection in resource-constrained environments. Adaptive end-effectors capable of operating across different crop systems, weed growth stages, and soil conditions can minimize crop disturbance while improving robustness. Likewise, reliable navigation will increasingly depend on sensor fusion approaches that combine LiDAR, GPS, IMUs, and computer vision to deliver stable performance in heterogeneous and unstructured terrains. Modular platform designs with intuitive user interfaces will lower adoption barriers by enabling farmers to operate and troubleshoot systems without specialized expertise. Just as important, more long-term and large-scale field trials must be conducted under varying agro-ecological conditions to validate performance, ensure reliability, and strengthen confidence in these technologies.</p>
<p>The future of intelligent weeding will likely rest on hybrid approaches that integrate mechanical control with complementary methods such as selective see-and-spray systems or laser-based techniques. Such integration can enhance precision, reduce herbicide reliance, and provide a more holistic and sustainable weed management solution. By combining the mechanical precision of robotics with the flexibility of other innovative methods, these hybrid platforms can address both environmental sustainability and economic feasibility. In conclusion, while significant challenges remain, the convergence of AI-driven detection, adaptive mechanical tools, and advanced robotic platforms is paving the way for a new era of weed management. Intelligent mechanical weeders hold the potential to reduce chemical inputs, improve crop yields, and promote sustainable farming practices. As research continues to refine detection accuracy, actuation efficiency, navigation reliability, and system affordability, these technologies are poised to become indispensable components of precision agriculture, offering practical and scalable solutions to the pressing challenge of sustainable food production.</p>
</sec>
</body>
<back>
<sec id="s6" sec-type="author-contributions">
<title>Author contributions</title>
<p>SD: Data curation, Formal Analysis, Methodology, Writing &#x2013; original draft. AU: Investigation, Writing &#x2013; review &amp; editing, Methodology, Formal Analysis. XS: Funding acquisition, Writing &#x2013; review &amp; editing, Supervision, Project administration.</p></sec>
<sec id="s8" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p></sec>
<sec id="s9" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declare that no Generative AI was used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p></sec>
<sec id="s10" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p></sec>
<ref-list>
<title>References</title>
<ref id="B1">
<mixed-citation publication-type="web">
<person-group person-group-type="author">
<name><surname>Aarab</surname> <given-names>C.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Everything you need to know about lidar in automotive | Keysight blogs</article-title>. Available online at: <uri xlink:href="https://www.keysight.com/blogs/en/inds/2023/10/04/everything-you-need-to-know-about-lidar-in-automotive">https://www.keysight.com/blogs/en/inds/2023/10/04/everything-you-need-to-know-about-lidar-in-automotive</uri> (Accessed <date-in-citation content-type="access-date">January 21, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Abanay</surname> <given-names>A.</given-names></name>
<name><surname>Masmoudi</surname> <given-names>L.</given-names></name>
<name><surname>Ansari</surname> <given-names>M. E.</given-names></name>
<name><surname>Gonzalez-Jimenez</surname> <given-names>J.</given-names></name>
<name><surname>Moreno</surname> <given-names>F.-A.</given-names></name>
<name><surname>Abanay</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>LIDAR-based autonomous navigation method for an agricultural mobile robot in strawberry greenhouse: AgriEco Robot</article-title>. <source>AIMS Electron. Electrical Eng.</source> <volume>6</volume>, <fpage>317</fpage>&#x2013;<lpage>328</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3934/electreng.2022019</pub-id>
</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Abebe</surname> <given-names>D.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Effect of hand frequency weeding on crop growth parameters and yield</article-title>. <source>J. Food Nutrit</source> <volume>3</volume>, <fpage>2836</fpage>&#x2013;<lpage>2276</lpage>.
</mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Ahmad</surname> <given-names>M. T.</given-names></name>
<name><surname>Tang</surname> <given-names>L.</given-names></name>
<name><surname>Steward</surname> <given-names>B. L.</given-names></name>
</person-group> (<year>2014</year>). &#x201c;
<article-title>Automated mechanical weeding</article-title>,&#x201d; in <source>Automation: the future of weed control in cropping systems</source>. Eds. 
<person-group person-group-type="editor">
<name><surname>Young</surname> <given-names>S. L.</given-names></name>
<name><surname>Pierce</surname> <given-names>F. J.</given-names></name>
</person-group> (<publisher-loc>Dordrecht</publisher-loc>: 
<publisher-name>Springer Netherlands</publisher-name>), <fpage>125</fpage>&#x2013;<lpage>137</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-94-007-7512-1_7</pub-id>
</mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Ahmadi</surname> <given-names>A.</given-names></name>
<name><surname>Halstead</surname> <given-names>M.</given-names></name>
<name><surname>McCool</surname> <given-names>C.</given-names></name>
</person-group> (<year>2022</year>). &#x201c;
<article-title>BonnBot-I: A precise weed management and crop monitoring platform</article-title>,&#x201d; in <conf-name>2022 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</conf-name>. <publisher-loc>Kyoto, Japan</publisher-loc>: 
<publisher-name>IEEE</publisher-name>. p. <fpage>9202</fpage>&#x2013;<lpage>9209</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/IROS47612.2022.9981304</pub-id>
</mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Alagbo</surname> <given-names>O.</given-names></name>
<name><surname>Spaeth</surname> <given-names>M.</given-names></name>
<name><surname>Saile</surname> <given-names>M.</given-names></name>
<name><surname>Schumacher</surname> <given-names>M.</given-names></name>
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
<name><surname>Alagbo</surname> <given-names>O.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>Weed management in ridge tillage systems&#x2014;A review</article-title>. <source>Agronomy</source> <volume>12</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy12040910</pub-id>
</mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>And&#xfa;jar</surname> <given-names>D.</given-names></name>
<name><surname>Weis</surname> <given-names>M.</given-names></name>
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
</person-group> (<year>2012</year>). 
<article-title>An ultrasonic system for weed detection in cereal crops</article-title>. <source>Sensors</source> <volume>12</volume>, <elocation-id>12</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s121217343</pub-id>, PMID: <pub-id pub-id-type="pmid">23443401</pub-id>
</mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Asaf</surname> <given-names>E.</given-names></name>
<name><surname>Shulner</surname> <given-names>I.</given-names></name>
<name><surname>Bakshian</surname> <given-names>H.</given-names></name>
<name><surname>Kapiluto</surname> <given-names>O.</given-names></name>
<name><surname>Eizenberg</surname> <given-names>H.</given-names></name>
<name><surname>Lati</surname> <given-names>R. N.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>The finger weeder cultivator for intra-row mechanical weed control: Effects of uprooting force on selected weed species</article-title>. <source>Weed Res.</source> <volume>64</volume>, <fpage>321</fpage>&#x2013;<lpage>332</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/wre.12652</pub-id>
</mixed-citation>
</ref>
<ref id="B9">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bajwa</surname> <given-names>A. A.</given-names></name>
<name><surname>Mahajan</surname> <given-names>G.</given-names></name>
<name><surname>Chauhan</surname> <given-names>B. S.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Nonconventional weed management strategies for modernAgriculture</article-title>. <source>Weed Sci.</source> <volume>63</volume>, <fpage>723</fpage>&#x2013;<lpage>747</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1614/WS-D-15-00064.1</pub-id>
</mixed-citation>
</ref>
<ref id="B10">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Balas</surname> <given-names>P.</given-names></name>
<name><surname>Makavana</surname> <given-names>J.</given-names></name>
<name><surname>Mohnot</surname> <given-names>P.</given-names></name>
<name><surname>Jhala</surname> <given-names>K.</given-names></name>
<name><surname>Yadav</surname> <given-names>R.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Inter and intra row Weeders: A review</article-title>. <source>Curr. J. Appl. Sci. Technol.</source> <volume>41</volume>, <fpage>1</fpage>&#x2013;<lpage>9</lpage>.
</mixed-citation>
</ref>
<ref id="B11">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bawden</surname> <given-names>O.</given-names></name>
<name><surname>Kulk</surname> <given-names>J.</given-names></name>
<name><surname>Russell</surname> <given-names>R.</given-names></name>
<name><surname>McCool</surname> <given-names>C.</given-names></name>
<name><surname>English</surname> <given-names>A.</given-names></name>
<name><surname>Dayoub</surname> <given-names>F.</given-names></name>
<etal/>
</person-group>. (<year>2017</year>). 
<article-title>Robot for weed species plant-specific management</article-title>. <source>J. Field Robotics</source> <volume>34</volume>, <fpage>1179</fpage>&#x2013;<lpage>1199</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/rob.21727</pub-id>
</mixed-citation>
</ref>
<ref id="B12">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Berge</surname> <given-names>T. W.</given-names></name>
<name><surname>Urdal</surname> <given-names>F.</given-names></name>
<name><surname>Torp</surname> <given-names>T.</given-names></name>
<name><surname>Andreasen</surname> <given-names>C.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>A sensor-based decision model for precision weed harrowing</article-title>. <source>Agronomy</source> <volume>14</volume>, <elocation-id>1</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14010088</pub-id>
</mixed-citation>
</ref>
<ref id="B13">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Berkmortel</surname> <given-names>N.</given-names></name>
<name><surname>Curtis</surname> <given-names>M.</given-names></name>
<name><surname>Johnson</surname> <given-names>C.</given-names></name>
<name><surname>Schmidt</surname> <given-names>A.</given-names></name>
<name><surname>Hill</surname> <given-names>E.</given-names></name>
<name><surname>Andrew Gadsden</surname> <given-names>S.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Development of a mechatronic system for plant recognition and intra-row weeding with machine vision</article-title>. <source>J. Agric. Sci.</source> <volume>13</volume>, <elocation-id>3</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.5539/jas.v13n3p1</pub-id>
</mixed-citation>
</ref>
<ref id="B14">
<mixed-citation publication-type="web">
<person-group person-group-type="author">
<name><surname>Bernier</surname> <given-names>C.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Weeding robots: Redefining sustainability in agriculture | HowToRobot</article-title>. Available online at: <uri xlink:href="https://howtorobot.com/expert-insight/weeding-robots-redefining-sustainability-agriculture">https://howtorobot.com/expert-insight/weeding-robots-redefining-sustainability-agriculture</uri> (Accessed <date-in-citation content-type="access-date">January 14, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B15">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Binbin</surname> <given-names>X.</given-names></name>
<name><surname>Jizhan</surname> <given-names>L.</given-names></name>
<name><surname>Meng</surname> <given-names>H.</given-names></name>
<name><surname>Jian</surname> <given-names>W.</given-names></name>
<name><surname>Zhujie</surname> <given-names>X.</given-names></name>
</person-group> (<year>2021</year>). &#x201c;
<article-title>Research progress on autonomous navigation technology of agricultural robot</article-title>,&#x201d; in <conf-name>2021 IEEE 11th Annual International Conference on CYBER Technology in Automation, Control, and Intelligent Systems (CYBER)</conf-name>. <fpage>891</fpage>&#x2013;<lpage>898</lpage> (<publisher-loc>Jiaxing, China</publisher-loc>: 
<publisher-name>IEEE</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1109/CYBER53097.2021.9588152</pub-id>
</mixed-citation>
</ref>
<ref id="B16">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Bond</surname> <given-names>W.</given-names></name>
<name><surname>Turner</surname> <given-names>R.</given-names></name>
<name><surname>Grundy</surname> <given-names>A.</given-names></name>
</person-group> (<year>2003</year>). <source>A review of non-chemical weed management</source>. <publisher-loc>Coventry, UK</publisher-loc>: 
<publisher-name>HDRA, the Organic Organisation</publisher-name>.
</mixed-citation>
</ref>
<ref id="B17">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Briot</surname> <given-names>S.</given-names></name>
<name><surname>Khalil</surname> <given-names>W.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Homogeneous transformation matrix</article-title>. <source>Dynamics of Parallel Robots: From Rigid Bodies to Flexible Elements</source> <volume>35</volume>, <fpage>19</fpage>&#x2013;<lpage>32</lpage>.
</mixed-citation>
</ref>
<ref id="B18">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Buhler</surname> <given-names>D. D.</given-names></name>
<name><surname>Doll</surname> <given-names>J. D.</given-names></name>
<name><surname>Proost</surname> <given-names>R. T.</given-names></name>
<name><surname>Visocky</surname> <given-names>M. R.</given-names></name>
</person-group> (<year>1995</year>). 
<article-title>Integrating mechanical weeding with reduced herbicide use in conservation tillage corn production systems</article-title>. <source>Agron. J.</source> <volume>87</volume>, <fpage>507</fpage>&#x2013;<lpage>512</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2134/agronj1995.00021962008700030018x</pub-id>
</mixed-citation>
</ref>
<ref id="B19">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Cao</surname> <given-names>C.-T.</given-names></name>
<name><surname>Do</surname> <given-names>V.-P.</given-names></name>
<name><surname>Lee</surname> <given-names>B.-R.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>A novel indirect calibration approach for robot positioning error compensation based on neural network and hand-eye vision</article-title>. <source>Appl. Sci.</source> <volume>9</volume>, <elocation-id>9</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/app9091940</pub-id>
</mixed-citation>
</ref>
<ref id="B20">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chandel</surname> <given-names>N. S.</given-names></name>
<name><surname>Chandel</surname> <given-names>A. K.</given-names></name>
<name><surname>Roul</surname> <given-names>A. K.</given-names></name>
<name><surname>Solanke</surname> <given-names>K. R.</given-names></name>
<name><surname>Mehta</surname> <given-names>C. R.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>An integrated inter- and intra-row weeding system for row crops</article-title>. <source>Crop Prot.</source> <volume>145</volume>, <elocation-id>105642</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cropro.2021.105642</pub-id>
</mixed-citation>
</ref>
<ref id="B21">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chang</surname> <given-names>C.-L.</given-names></name>
<name><surname>Chen</surname> <given-names>H.-W.</given-names></name>
<name><surname>Ke</surname> <given-names>J.-Y.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Robust guidance and selective spraying based on deep learning for an advanced four-wheeled farming robot</article-title>. <source>Agriculture</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture14010057</pub-id>
</mixed-citation>
</ref>
<ref id="B22">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chang</surname> <given-names>C.-L.</given-names></name>
<name><surname>Xie</surname> <given-names>B.-X.</given-names></name>
<name><surname>Chung</surname> <given-names>S.-C.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Mechanical control with a deep learning method for precise weeding on a farm</article-title>. <source>Agriculture</source> <volume>11</volume>, <elocation-id>1049</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture11111049</pub-id>
</mixed-citation>
</ref>
<ref id="B23">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chicouene</surname> <given-names>D.</given-names></name>
</person-group> (<year>2007</year>). 
<article-title>Mechanical destruction of weeds. A review</article-title>. <source>Agron. Sustain. Dev.</source> <volume>27</volume>, <fpage>19</fpage>&#x2013;<lpage>27</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1051/agro:2006012</pub-id>
</mixed-citation>
</ref>
<ref id="B24">
<mixed-citation publication-type="book">
<person-group person-group-type="author"><collab>Clearpath Robotics</collab>
</person-group> (<year>2023</year>). <source>Husky observer</source> (
<publisher-name>Clearpath Robotics</publisher-name>). Available online at: <uri xlink:href="https://clearpathrobotics.com/husky-observer/">https://clearpathrobotics.com/husky-observer/</uri> (Accessed <date-in-citation content-type="access-date">January 21, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B25">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Cloutier</surname> <given-names>D.</given-names></name>
<name><surname>van der Weide</surname> <given-names>R.</given-names></name>
<name><surname>Peruzzi</surname> <given-names>A.</given-names></name>
<name><surname>Leblanc</surname> <given-names>M.</given-names></name>
</person-group> (<year>2007</year>). &#x201c;
<article-title>Mechanical weed management</article-title>,&#x201d; in <source>Nonchemical weed management: principles, concepts and technology</source> (
<publisher-name>CAB International</publisher-name>, <publisher-loc>Wallingford, UK</publisher-loc>), <fpage>111</fpage>&#x2013;<lpage>134</lpage>.
</mixed-citation>
</ref>
<ref id="B26">
<mixed-citation publication-type="web">
<person-group person-group-type="author">
<name><surname>Collins</surname> <given-names>M.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Velodyne Lidar, GreenValley International sign multi-year agreement</article-title>. Available online at: <uri xlink:href="https://www.geoweeknews.com/news/velodyne-lidar-greenvalley-international-sign-multi-year-agreement">https://www.geoweeknews.com/news/velodyne-lidar-greenvalley-international-sign-multi-year-agreement</uri> (Accessed <date-in-citation content-type="access-date">January 12, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B27">
<mixed-citation publication-type="book">
<person-group person-group-type="author"><collab>Croplands</collab>
</person-group> (<year>2021</year>). <source>WEED-IT optical spot spraying</source> (
<publisher-name>Croplands Equipment Canada</publisher-name>). Available online at: <uri xlink:href="https://croplands.com/ca/products/weed-it-optical-spot-spraying/">https://croplands.com/ca/products/weed-it-optical-spot-spraying/</uri> (Accessed <date-in-citation content-type="access-date">January 12, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B28">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>D&#xe9;lye</surname> <given-names>C.</given-names></name>
<name><surname>Jasieniuk</surname> <given-names>M.</given-names></name>
<name><surname>Corre</surname> <given-names>V. L.</given-names></name>
</person-group> (<year>2013</year>). 
<article-title>Deciphering the evolution of herbicide resistance in weeds</article-title>. <source>Trends Genet.</source> <volume>29</volume>, <fpage>649</fpage>&#x2013;<lpage>658</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.tig.2013.06.001</pub-id>, PMID: <pub-id pub-id-type="pmid">23830583</pub-id>
</mixed-citation>
</ref>
<ref id="B29">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Dhakal</surname> <given-names>M.</given-names></name>
<name><surname>Zinati</surname> <given-names>G.</given-names></name>
<name><surname>Fulcher</surname> <given-names>M.</given-names></name>
<name><surname>Fornara</surname> <given-names>D.</given-names></name>
<name><surname>Martani</surname> <given-names>E.</given-names></name>
<name><surname>Contina</surname> <given-names>J. B.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). &#x201c;
<article-title>Chapter Three&#x2014;Challenges and emerging opportunities for weed management in organic agriculture</article-title>,&#x201d; in <source>Advances in agronomy</source>, vol. <volume>184</volume> . Ed. 
<person-group person-group-type="editor">
<name><surname>Sparks</surname> <given-names>D. L.</given-names></name>
</person-group> (
<publisher-name>Academic Press</publisher-name>), <fpage>125</fpage>&#x2013;<lpage>172</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/bs.agron.2023.11.002</pub-id>
</mixed-citation>
</ref>
<ref id="B30">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Fennimore</surname> <given-names>S. A.</given-names></name>
<name><surname>Hanson</surname> <given-names>B. D.</given-names></name>
<name><surname>Sosnoskie</surname> <given-names>L. M.</given-names></name>
<name><surname>Samtani</surname> <given-names>J. B.</given-names></name>
<name><surname>Datta</surname> <given-names>A.</given-names></name>
<name><surname>Knezevic</surname> <given-names>S. Z.</given-names></name>
<etal/>
</person-group>. (<year>2013</year>). &#x201c;
<article-title>Field applications of automated weed control: Western Hemisphere</article-title>,&#x201d; in <source>Automation: The future of weed control in cropping systems</source> (<publisher-loc>Dordrecht</publisher-loc>: 
<publisher-name>Springer</publisher-name>), <fpage>151</fpage>&#x2013;<lpage>169</lpage>.
</mixed-citation>
</ref>
<ref id="B31">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Flessner</surname> <given-names>M. L.</given-names></name>
<name><surname>Burke</surname> <given-names>I. C.</given-names></name>
<name><surname>Dille</surname> <given-names>J. A.</given-names></name>
<name><surname>Everman</surname> <given-names>W. J.</given-names></name>
<name><surname>VanGessel</surname> <given-names>M. J.</given-names></name>
<name><surname>Tidemann</surname> <given-names>B.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). 
<article-title>Potential wheat yield loss due to weeds in the United States and Canada</article-title>. <source>Weed Technol.</source> <volume>35</volume>, <fpage>916</fpage>&#x2013;<lpage>923</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1017/wet.2021.78</pub-id>
</mixed-citation>
</ref>
<ref id="B32">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gagliardi</surname> <given-names>L.</given-names></name>
<name><surname>Fontanelli</surname> <given-names>M.</given-names></name>
<name><surname>Luglio</surname> <given-names>S. M.</given-names></name>
<name><surname>Frasconi</surname> <given-names>C.</given-names></name>
<name><surname>Peruzzi</surname> <given-names>A.</given-names></name>
<name><surname>Raffaelli</surname> <given-names>M.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Evaluation of sustainable strategies for mechanical under-row weed control in the vineyard</article-title>. <source>Agronomy</source> <volume>13</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy13123005</pub-id>
</mixed-citation>
</ref>
<ref id="B33">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gai</surname> <given-names>J.</given-names></name>
<name><surname>Tang</surname> <given-names>L.</given-names></name>
<name><surname>Steward</surname> <given-names>B. L.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Automated crop plant detection based on the fusion of color and depth images for robotic weed control</article-title>. <source>J. Field Robotics</source> <volume>37</volume>, <fpage>35</fpage>&#x2013;<lpage>52</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/rob.21897</pub-id>
</mixed-citation>
</ref>
<ref id="B34">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gao</surname> <given-names>W.-T.</given-names></name>
<name><surname>Su</surname> <given-names>W.-H.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Weed management methods for herbaceous field crops: A review</article-title>. <source>Agronomy</source> <volume>14</volume>, <elocation-id>3</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14030486</pub-id>
</mixed-citation>
</ref>
<ref id="B35">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
<name><surname>And&#xfa;jar Sanchez</surname> <given-names>D.</given-names></name>
<name><surname>Hamouz</surname> <given-names>P.</given-names></name>
<name><surname>Peteinatos</surname> <given-names>G. G.</given-names></name>
<name><surname>Christensen</surname> <given-names>S.</given-names></name>
<name><surname>Fernandez-Quintanilla</surname> <given-names>C.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Advances in site-specific weed management in agriculture&#x2014;A review</article-title>. <source>Weed Res.</source> <volume>62</volume>, <fpage>123</fpage>&#x2013;<lpage>133</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/wre.12526</pub-id>
</mixed-citation>
</ref>
<ref id="B36">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
<name><surname>Kollenda</surname> <given-names>B.</given-names></name>
<name><surname>Machleb</surname> <given-names>J.</given-names></name>
<name><surname>M&#xf6;ller</surname> <given-names>K.</given-names></name>
<name><surname>Butz</surname> <given-names>A.</given-names></name>
<name><surname>Reiser</surname> <given-names>D.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>Camera-guided weed hoeing in winter cereals with narrow row distance</article-title>. <source>Gesunde Pflanzen</source> <volume>72</volume>, <fpage>403</fpage>&#x2013;<lpage>411</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s10343-020-00523-5</pub-id>
</mixed-citation>
</ref>
<ref id="B37">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Graham Ram</surname> <given-names>B.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Costa</surname> <given-names>C.</given-names></name>
<name><surname>Raju Ahmed</surname> <given-names>M.</given-names></name>
<name><surname>Peters</surname> <given-names>T.</given-names></name>
<name><surname>Jhala</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Palmer amaranth identification using hyperspectral imaging and machine learning technologies in soybean field</article-title>. <source>Comput. Electron. Agric.</source> <volume>215</volume>, <elocation-id>108444</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.108444</pub-id>
</mixed-citation>
</ref>
<ref id="B38">
<mixed-citation publication-type="web">
<person-group person-group-type="author"><collab>GreenTech Robotics</collab>
</person-group> (<year>2023</year>). 
<article-title>WeedSpider</article-title>. Available online at: <uri xlink:href="https://seedspider.com/products/weedspider/">https://seedspider.com/products/weedspider/</uri> (Accessed <date-in-citation content-type="access-date">January 14, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B39">
<mixed-citation publication-type="web">
<person-group person-group-type="author">
<name><surname>Haag</surname> <given-names>D.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>US Patent Application for WEEDING ROBOT MECHANISM Patent Application (Application 20240000060 issued January 4, 2024)&#x2014;Justia Patents Search</article-title>. Available online at: <uri xlink:href="https://patents.justia.com/patent/20240000060">https://patents.justia.com/patent/20240000060</uri> (Accessed <date-in-citation content-type="access-date">January 13, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B40">
<mixed-citation publication-type="web">
<person-group person-group-type="author">
<name><surname>Hoidal</surname> <given-names>N.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Mechanical weed control highlights</article-title>. Available online at: <uri xlink:href="https://blog-fruit-vegetable-ipm.extension.umn.edu/2019/09/mechanical-weed-control-highlights.html">https://blog-fruit-vegetable-ipm.extension.umn.edu/2019/09/mechanical-weed-control-highlights.html</uri> (Accessed <date-in-citation content-type="access-date">November 13, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B41">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hu</surname> <given-names>W.</given-names></name>
<name><surname>Haq</surname> <given-names>S. I. U.</given-names></name>
<name><surname>Lan</surname> <given-names>Y.</given-names></name>
<name><surname>Zhao</surname> <given-names>Z.</given-names></name>
<name><surname>Ahmad</surname> <given-names>S.</given-names></name>
<name><surname>Al Bahir</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Design and performance evaluation of a spiral bar precision weeding mechanism for corn fields</article-title>. <source>Sci. Rep.</source> <volume>14</volume>, <fpage>28186</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-024-76311-2</pub-id>, PMID: <pub-id pub-id-type="pmid">39548138</pub-id>
</mixed-citation>
</ref>
<ref id="B42">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Hussain</surname> <given-names>M.</given-names></name>
<name><surname>Farooq</surname> <given-names>S.</given-names></name>
<name><surname>Merfield</surname> <given-names>C.</given-names></name>
<name><surname>Jabran</surname> <given-names>K.</given-names></name>
</person-group> (<year>2018</year>). &#x201c;
<article-title>Mechanical weed control</article-title>,&#x201d; in <source>Non-chemical weed control</source> (
<publisher-name>Elsevier</publisher-name>), <fpage>133</fpage>&#x2013;<lpage>155</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/B978-0-12-809881-3.00008-5</pub-id>
</mixed-citation>
</ref>
<ref id="B43">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jabran</surname> <given-names>K.</given-names></name>
<name><surname>Chauhan</surname> <given-names>B. S.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Weed management in aerobic rice systems</article-title>. <source>Crop Prot.</source> <volume>78</volume>, <fpage>151</fpage>&#x2013;<lpage>163</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cropro.2015.09.005</pub-id>
</mixed-citation>
</ref>
<ref id="B44">
<mixed-citation publication-type="web">
<person-group person-group-type="author">
<name><surname>Jafarbiglu</surname> <given-names>H.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>RGB | Digital agriculture laboratory</article-title>. Available online at: <uri xlink:href="https://digitalag.ucdavis.edu/41-rgb">https://digitalag.ucdavis.edu/41-rgb</uri> (Accessed <date-in-citation content-type="access-date">January 12, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B45">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jiang</surname> <given-names>W.</given-names></name>
<name><surname>Quan</surname> <given-names>L.</given-names></name>
<name><surname>Wei</surname> <given-names>G.</given-names></name>
<name><surname>Chang</surname> <given-names>C.</given-names></name>
<name><surname>Geng</surname> <given-names>T.</given-names></name>
</person-group> (<year>2023</year>b). 
<article-title>A conceptual evaluation of a weed control method with post-damage application of herbicides: A composite intelligent intra-row weeding robot</article-title>. <source>Soil Tillage Res.</source> <volume>234</volume>, <elocation-id>105837</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.still.2023.105837</pub-id>
</mixed-citation>
</ref>
<ref id="B46">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jiang</surname> <given-names>B.</given-names></name>
<name><surname>Zhang</surname> <given-names>J.-L.</given-names></name>
<name><surname>Su</surname> <given-names>W.-H.</given-names></name>
<name><surname>Hu</surname> <given-names>R.</given-names></name>
</person-group> (<year>2023</year>a). 
<article-title>A SPH-YOLOv5x-based automatic system for intra-row weed control in lettuce</article-title>. <source>Agronomy</source> <volume>13</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy13122915</pub-id>
</mixed-citation>
</ref>
<ref id="B47">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jiao</surname> <given-names>J.</given-names></name>
<name><surname>Wang</surname> <given-names>Z.</given-names></name>
<name><surname>Luo</surname> <given-names>H.</given-names></name>
<name><surname>Chen</surname> <given-names>G.</given-names></name>
<name><surname>Liu</surname> <given-names>H.</given-names></name>
<name><surname>Guan</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>Development of a mechanical weeder and experiment on the growth, yield and quality of rice</article-title>. <source>Int. J. Agric. Biol. Eng.</source> <volume>15</volume>, <elocation-id>3</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.25165/ijabe.v15i3.6978</pub-id>
</mixed-citation>
</ref>
<ref id="B48">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jiao</surname> <given-names>J.</given-names></name>
<name><surname>Zang</surname> <given-names>Y.</given-names></name>
<name><surname>Chen</surname> <given-names>C.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Key technologies of intelligent weeding for vegetables: A review</article-title>. <source>Agriculture</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture14081378</pub-id>
</mixed-citation>
</ref>
<ref id="B49">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jin</surname> <given-names>X.</given-names></name>
<name><surname>Che</surname> <given-names>J.</given-names></name>
<name><surname>Chen</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Weed identification using deep learning and image processing in vegetable plantation</article-title>. <source>IEEE Access</source> <volume>9</volume>, <fpage>10940</fpage>&#x2013;<lpage>10950</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2021.3050296</pub-id>
</mixed-citation>
</ref>
<ref id="B50">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jin</surname> <given-names>T.</given-names></name>
<name><surname>Han</surname> <given-names>X.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Robotic arms in precision agriculture: A comprehensive review of the technologies, applications, challenges, and future prospects</article-title>. <source>Comput. Electron. Agric.</source> <volume>221</volume>, <elocation-id>108938</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.108938</pub-id>
</mixed-citation>
</ref>
<ref id="B51">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Jog</surname> <given-names>M.</given-names></name>
<name><surname>Agashe</surname> <given-names>S.</given-names></name>
</person-group> (<year>2024</year>). &#x201c;
<article-title>Agrobot for inter-row and intra-row weeding</article-title>,&#x201d; in <conf-name>2024 International Conference on Emerging Smart Computing and Informatics (ESCI)</conf-name>. <publisher-loc>Pune, India</publisher-loc>: 
<publisher-name>IEEE</publisher-name>. p. <fpage>1</fpage>&#x2013;<lpage>4</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ESCI59607.2024.10497296</pub-id>
</mixed-citation>
</ref>
<ref id="B52">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kennedy</surname> <given-names>H.</given-names></name>
<name><surname>Fennimore</surname> <given-names>S. A.</given-names></name>
<name><surname>Slaughter</surname> <given-names>D. C.</given-names></name>
<name><surname>Nguyen</surname> <given-names>T. T.</given-names></name>
<name><surname>Vuong</surname> <given-names>V. L.</given-names></name>
<name><surname>Raja</surname> <given-names>R.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>Crop signal markers facilitate crop detection and weed removal from lettuce and tomato by an intelligent cultivator</article-title>. <source>Weed Technol.</source> <volume>34</volume>, <fpage>342</fpage>&#x2013;<lpage>350</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1017/wet.2019.120</pub-id>
</mixed-citation>
</ref>
<ref id="B53">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kouwenhoven</surname> <given-names>J. K.</given-names></name>
<name><surname>Wevers</surname> <given-names>J. D. A.</given-names></name>
<name><surname>Post</surname> <given-names>B. J.</given-names></name>
</person-group> (<year>1991</year>). 
<article-title>Possibilities of mechanical post-emergence weed control in sugar beet</article-title>. <source>Soil Tillage Res.</source> <volume>21</volume>, <fpage>85</fpage>&#x2013;<lpage>95</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/0167-1987(91)90007-K</pub-id>
</mixed-citation>
</ref>
<ref id="B54">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kumar</surname> <given-names>S. P.</given-names></name>
<name><surname>Tewari</surname> <given-names>V. K.</given-names></name>
<name><surname>Chandel</surname> <given-names>A. K.</given-names></name>
<name><surname>Mehta</surname> <given-names>C. R.</given-names></name>
<name><surname>Nare</surname> <given-names>B.</given-names></name>
<name><surname>Chethan</surname> <given-names>C. R.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>A fuzzy logic algorithm derived mechatronic concept prototype for crop damage avoidance during eco-friendly eradication of intra-row weeds</article-title>. <source>Artif. Intell. Agric.</source> <volume>4</volume>, <fpage>116</fpage>&#x2013;<lpage>126</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.aiia.2020.06.004</pub-id>
</mixed-citation>
</ref>
<ref id="B55">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kunz</surname> <given-names>C.</given-names></name>
<name><surname>Weber</surname> <given-names>J. F.</given-names></name>
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Benefits of precision farming technologies for mechanical weed control in soybean and sugar beet&#x2014;Comparison of precision hoeing with conventional mechanical weed control</article-title>. <source>Agronomy</source> <volume>5</volume>, <elocation-id>2</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy5020130</pub-id>
</mixed-citation>
</ref>
<ref id="B56">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kunz</surname> <given-names>C.</given-names></name>
<name><surname>Weber</surname> <given-names>J. F.</given-names></name>
<name><surname>Peteinatos</surname> <given-names>G. G.</given-names></name>
<name><surname>S&#xf6;kefeld</surname> <given-names>M.</given-names></name>
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Camera steered mechanical weed control in sugar beet, maize and soybean</article-title>. <source>Precis. Agric.</source> <volume>19</volume>, <fpage>708</fpage>&#x2013;<lpage>720</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11119-017-9551-4</pub-id>
</mixed-citation>
</ref>
<ref id="B57">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Laftouty</surname> <given-names>Z.</given-names></name>
<name><surname>Bouraada</surname> <given-names>K.</given-names></name>
<name><surname>Bakrim</surname> <given-names>A.</given-names></name>
<name><surname>Boudhir</surname> <given-names>A. A.</given-names></name>
<name><surname>Essafi</surname> <given-names>M.</given-names></name>
</person-group> (<year>2023</year>). &#x201c;
<article-title>Weed detection and treatment by RGB system&#x2014;Case of the FPL smart farm</article-title>,&#x201d; in <conf-name>Proceedings of the 6th International Conference on Networking, Intelligent Systems &amp; Security</conf-name>. <publisher-loc>New York, NY, USA</publisher-loc>: 
<publisher-name>Association for Computing Machinery</publisher-name>, <fpage>1</fpage>&#x2013;<lpage>10</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1145/3607720.3607752</pub-id>
</mixed-citation>
</ref>
<ref id="B58">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Langsenkamp</surname> <given-names>F.</given-names></name>
<name><surname>Sellmann</surname> <given-names>F.</given-names></name>
<name><surname>Kohlbrecher</surname> <given-names>M.</given-names></name>
<name><surname>Kielhorn</surname> <given-names>A.</given-names></name>
<name><surname>Michaels</surname> <given-names>A.</given-names></name>
<name><surname>Ruckelshausen</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2014</year>). <source>Tube Stamp for mechanical intra-row individual Plant Weed Control</source>. <publisher-loc>Beijing, China</publisher-loc>: 
<publisher-name>CIGR (International Commission of Agricultural and Biosystems Engineering)</publisher-name>.
</mixed-citation>
</ref>
<ref id="B59">
<mixed-citation publication-type="web">
<person-group person-group-type="author"><collab>LEMKEN</collab>
</person-group> (<year>2022</year>). 
<article-title>IC-weeder AI | LEMKEN</article-title>. Available online at: <uri xlink:href="https://lemken.com/en-en/agricultural-machines/cropcare/weed-control/mechanical-weed-control/ic-weeder-ai">https://lemken.com/en-en/agricultural-machines/cropcare/weed-control/mechanical-weed-control/ic-weeder-ai</uri> (Accessed <date-in-citation content-type="access-date">January 10, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B60">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>Y.</given-names></name>
<name><surname>Guo</surname> <given-names>Z.</given-names></name>
<name><surname>Shuang</surname> <given-names>F.</given-names></name>
<name><surname>Zhang</surname> <given-names>M.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Key technologies of machine vision for weeding robots: A review and benchmark</article-title>. <source>Comput. Electron. Agric.</source> <volume>196</volume>, <elocation-id>106880</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2022.106880</pub-id>
</mixed-citation>
</ref>
<ref id="B61">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liu</surname> <given-names>C.</given-names></name>
<name><surname>Yang</surname> <given-names>K.</given-names></name>
<name><surname>Chen</surname> <given-names>Y.</given-names></name>
<name><surname>Gong</surname> <given-names>H.</given-names></name>
<name><surname>Feng</surname> <given-names>X.</given-names></name>
<name><surname>Tang</surname> <given-names>Z.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Benefits of mechanical weeding for weed control, rice growth characteristics and yield in paddy fields</article-title>. <source>Field Crops Res.</source> <volume>293</volume>, <elocation-id>108852</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.fcr.2023.108852</pub-id>
</mixed-citation>
</ref>
<ref id="B62">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Machleb</surname> <given-names>J.</given-names></name>
<name><surname>Peteinatos</surname> <given-names>G. G.</given-names></name>
<name><surname>Kollenda</surname> <given-names>B. L.</given-names></name>
<name><surname>And&#xfa;jar</surname> <given-names>D.</given-names></name>
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Sensor-based mechanical weed control: Present state and prospects</article-title>. <source>Comput. Electron. Agric.</source> <volume>176</volume>, <elocation-id>105638</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2020.105638</pub-id>
</mixed-citation>
</ref>
<ref id="B63">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Machleb</surname> <given-names>J.</given-names></name>
<name><surname>Peteinatos</surname> <given-names>G. G.</given-names></name>
<name><surname>S&#xf6;kefeld</surname> <given-names>M.</given-names></name>
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Sensor-based intrarow mechanical weed control in sugar beets with motorized finger weeders</article-title>. <source>Agronomy</source> <volume>11</volume>, <elocation-id>8</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy11081517</pub-id>
</mixed-citation>
</ref>
<ref id="B64">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Malavazi</surname> <given-names>F. B. P.</given-names></name>
<name><surname>Guyonneau</surname> <given-names>R.</given-names></name>
<name><surname>Fasquel</surname> <given-names>J.-B.</given-names></name>
<name><surname>Lagrange</surname> <given-names>S.</given-names></name>
<name><surname>Mercier</surname> <given-names>F.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>LiDAR-only based navigation algorithm for an autonomous agricultural robot</article-title>. <source>Comput. Electron. Agric.</source> <volume>154</volume>, <fpage>71</fpage>&#x2013;<lpage>79</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2018.08.034</pub-id>
</mixed-citation>
</ref>
<ref id="B65">
<mixed-citation publication-type="book">
<person-group person-group-type="author"><collab>MaxBotix</collab>
</person-group> (<year>2019</year>). <source>Ultrasonic sensors: advantages and limitations</source> (
<publisher-name>MaxBotix</publisher-name>). Available online at: <uri xlink:href="https://maxbotix.com/blogs/blog/advantages-limitations-ultrasonic-sensors">https://maxbotix.com/blogs/blog/advantages-limitations-ultrasonic-sensors</uri> (Accessed <date-in-citation content-type="access-date">January 12, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B66">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>McCool</surname> <given-names>C.</given-names></name>
<name><surname>Beattie</surname> <given-names>J.</given-names></name>
<name><surname>Firn</surname> <given-names>J.</given-names></name>
<name><surname>Lehnert</surname> <given-names>C.</given-names></name>
<name><surname>Kulk</surname> <given-names>J.</given-names></name>
<name><surname>Bawden</surname> <given-names>O.</given-names></name>
<etal/>
</person-group>. (<year>2018</year>). 
<article-title>Efficacy of mechanical weeding tools: A study into alternative weed management strategies enabled by robotics</article-title>. <source>IEEE Robotics Automation Lett.</source> <volume>3</volume>, <fpage>1184</fpage>&#x2013;<lpage>1190</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/LRA.2018.2794619</pub-id>
</mixed-citation>
</ref>
<ref id="B67">
<mixed-citation publication-type="web">
<person-group person-group-type="author"><collab>Photonics Media</collab>
</person-group> (<year>2022</year>). 
<article-title>Specim AFX17</article-title>. Available online at: <uri xlink:href="https://www.photonics.com/Buyers_Guide/Products/Cameras/Specim_AFX17/psp8883">https://www.photonics.com/Buyers_Guide/Products/Cameras/Specim_AFX17/psp8883</uri> (Accessed <date-in-citation content-type="access-date">September 4, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B68">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Mehdizadeh</surname> <given-names>M.</given-names></name>
<name><surname>Mushtaq</surname> <given-names>W.</given-names></name>
</person-group> (<year>2020</year>). &#x201c;
<article-title>Chapter 9 - biological control of weeds by allelopathic compounds from different plants: A bioherbicide approach</article-title>,&#x201d; in <source>Natural remedies for pest, disease and weed control</source>. Eds. 
<person-group person-group-type="editor">
<name><surname>Egbuna</surname> <given-names>C.</given-names></name>
<name><surname>Sawicka</surname> <given-names>B.</given-names></name>
</person-group> (
<publisher-name>Academic Press</publisher-name>), <fpage>107</fpage>&#x2013;<lpage>117</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/B978-0-12-819304-4.00009-9</pub-id>
</mixed-citation>
</ref>
<ref id="B69">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Melander</surname> <given-names>B.</given-names></name>
<name><surname>Lattanzi</surname> <given-names>B.</given-names></name>
<name><surname>Pannacci</surname> <given-names>E.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Intelligent versus non-intelligent mechanical intra-row weed control in transplanted onion and cabbage</article-title>. <source>Crop Prot.</source> <volume>72</volume>, <fpage>1</fpage>&#x2013;<lpage>8</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cropro.2015.02.017</pub-id>
</mixed-citation>
</ref>
<ref id="B70">
<mixed-citation publication-type="book">
<person-group person-group-type="author"><collab>MicaSense</collab>
</person-group> (<year>2023</year>). <source>Comparison of MicaSense cameras</source> (
<publisher-name>MicaSense Knowledge Base</publisher-name>). Available online at: <uri xlink:href="https://support.micasense.com/hc/en-us/articles/1500007828482-Comparison-of-MicaSense-Cameras">https://support.micasense.com/hc/en-us/articles/1500007828482-Comparison-of-MicaSense-Cameras</uri> (Accessed <date-in-citation content-type="access-date">November 13, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B71">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Moore</surname> <given-names>L. D.</given-names></name>
<name><surname>Jennings</surname> <given-names>K. M.</given-names></name>
<name><surname>Monks</surname> <given-names>D. W.</given-names></name>
<name><surname>Boyette</surname> <given-names>M. D.</given-names></name>
<name><surname>Leon</surname> <given-names>R. G.</given-names></name>
<name><surname>Jordan</surname> <given-names>D. L.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Evaluation of electrical and mechanical Palmer amaranth (Amaranthus palmeri) management in cucumber, peanut, and sweetpotato</article-title>. <source>Weed Technol.</source> <volume>37</volume>, <fpage>53</fpage>&#x2013;<lpage>59</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1017/wet.2023.1</pub-id>
</mixed-citation>
</ref>
<ref id="B72">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Mwitta</surname> <given-names>C.</given-names></name>
<name><surname>Rains</surname> <given-names>G. C.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>The integration of GPS and visual navigation for autonomous navigation of an Ackerman steering mobile robot in cotton fields</article-title>. <source>Front. Robotics AI</source> <volume>11</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/frobt.2024.1359887</pub-id>, PMID: <pub-id pub-id-type="pmid">38680621</pub-id>
</mixed-citation>
</ref>
<ref id="B73">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Nan</surname> <given-names>L.</given-names></name>
<name><surname>Chunlong</surname> <given-names>Z.</given-names></name>
<name><surname>Ziwen</surname> <given-names>C.</given-names></name>
<name><surname>Zenghong</surname> <given-names>M.</given-names></name>
<name><surname>Zhe</surname> <given-names>S.</given-names></name>
<name><surname>Ting</surname> <given-names>Y.</given-names></name>
<etal/>
</person-group>. (<year>2015</year>). 
<article-title>Crop positioning for robotic intra-row weeding based on machine vision</article-title>. <source>Int. J. Agric. Biol. Eng.</source> <volume>8</volume>, <elocation-id>6</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.25165/ijabe.v8i6.1932</pub-id>
</mixed-citation>
</ref>
<ref id="B74">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Naruhn</surname> <given-names>G.</given-names></name>
<name><surname>Schneevoigt</surname> <given-names>V.</given-names></name>
<name><surname>Hartung</surname> <given-names>J.</given-names></name>
<name><surname>Peteinatos</surname> <given-names>G.</given-names></name>
<name><surname>M&#xf6;ller</surname> <given-names>K.</given-names></name>
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Bi-directional hoeing in maize</article-title>. <source>Weed Res.</source> <volume>63</volume>, <fpage>348</fpage>&#x2013;<lpage>360</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/wre.12597</pub-id>
</mixed-citation>
</ref>
<ref id="B75">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Nath</surname> <given-names>C. P.</given-names></name>
<name><surname>Singh</surname> <given-names>R. G.</given-names></name>
<name><surname>Choudhary</surname> <given-names>V. K.</given-names></name>
<name><surname>Datta</surname> <given-names>D.</given-names></name>
<name><surname>Nandan</surname> <given-names>R.</given-names></name>
<name><surname>Singh</surname> <given-names>S. S.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Challenges and alternatives of herbicide-based weed management</article-title>. <source>Agronomy</source> <volume>14</volume>, <elocation-id>1</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14010126</pub-id>
</mixed-citation>
</ref>
<ref id="B76">
<mixed-citation publication-type="web">
<person-group person-group-type="author"><collab>Odd.Bot</collab>
</person-group> (<year>2024</year>). 
<article-title>Maverick weeding robot&#x2014;Odd.Bot</article-title>. Available online at: <uri xlink:href="https://www.odd.bot/maverick">https://www.odd.bot/maverick</uri> (Accessed <date-in-citation content-type="access-date">January 14, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B77">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Page</surname> <given-names>M. J.</given-names></name>
<name><surname>McKenzie</surname> <given-names>J. E.</given-names></name>
<name><surname>Bossuyt</surname> <given-names>P. M.</given-names></name>
<name><surname>Boutron</surname> <given-names>I.</given-names></name>
<name><surname>Hoffmann</surname> <given-names>T. C.</given-names></name>
<name><surname>Mulrow</surname> <given-names>C. D.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). 
<article-title>The PRISMA 2020 statement: An updated guideline for reporting systematic reviews</article-title>. <source>BMJ</source> <volume>372</volume>, <elocation-id>n71</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1136/bmj.n71</pub-id>, PMID: <pub-id pub-id-type="pmid">33782057</pub-id>
</mixed-citation>
</ref>
<ref id="B78">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Pannacci</surname> <given-names>E.</given-names></name>
<name><surname>Lattanzi</surname> <given-names>B.</given-names></name>
<name><surname>Tei</surname> <given-names>F.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>Non-chemical weed management strategies in minor crops: A review</article-title>. <source>Crop Prot.</source> <volume>96</volume>, <fpage>44</fpage>&#x2013;<lpage>58</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cropro.2017.01.012</pub-id>
</mixed-citation>
</ref>
<ref id="B79">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Pannacci</surname> <given-names>E.</given-names></name>
<name><surname>Onofri</surname> <given-names>A.</given-names></name>
<name><surname>Tei</surname> <given-names>F.</given-names></name>
</person-group> (<year>2018</year>). &#x201c;
<article-title>STRUMENTI NON CHIMICI DI GESTIONE DELLA VEGETAZIONE INFESTANTE NELLE COLTURE ORTICOLE</article-title>,&#x201d; in <source>Atti del XXI Convegno SIRFI &#x201c;Infestanti emergenti e riduzione di disponibilit&#xe0; di erbicidi&#x201d;</source> (
<publisher-name>SIRFI</publisher-name>), <fpage>39</fpage>&#x2013;<lpage>78</lpage>.
</mixed-citation>
</ref>
<ref id="B80">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Parasca</surname> <given-names>S. C.</given-names></name>
<name><surname>Spaeth</surname> <given-names>M.</given-names></name>
<name><surname>Rusu</surname> <given-names>T.</given-names></name>
<name><surname>Bogdan</surname> <given-names>I.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Mechanical weed control: sensor-based inter-row hoeing in sugar beet (Beta vulgaris L.) in the transylvanian depression</article-title>. <source>Agronomy</source> <volume>14</volume>, <elocation-id>1</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14010176</pub-id>
</mixed-citation>
</ref>
<ref id="B81">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Parish</surname> <given-names>S.</given-names></name>
</person-group> (<year>1990</year>). 
<article-title>A review of non-chemical weed control techniques</article-title>. <source>Biol. Agric. Horticulture</source> <volume>7</volume>, <fpage>117</fpage>&#x2013;<lpage>137</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/01448765.1990.9754540</pub-id>
</mixed-citation>
</ref>
<ref id="B82">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Patterson</surname> <given-names>M. G.</given-names></name>
<name><surname>Wehtje</surname> <given-names>G.</given-names></name>
<name><surname>Goff</surname> <given-names>W. D.</given-names></name>
</person-group> (<year>1990</year>). 
<article-title>Effects of weed control and irrigation on the growth of young pecans</article-title>. <source>Weed Technol.</source> <volume>4</volume>, <fpage>892</fpage>&#x2013;<lpage>894</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1017/S0890037X00026609</pub-id>
</mixed-citation>
</ref>
<ref id="B83">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Post</surname> <given-names>M. A.</given-names></name>
<name><surname>Bianco</surname> <given-names>A.</given-names></name>
<name><surname>Yan</surname> <given-names>X. T.</given-names></name>
</person-group> (<year>2017</year>). &#x201c;
<article-title>Autonomous navigation with ROS for a mobile robot in agricultural fields</article-title>,&#x201d; in <conf-name>Proceedings of the 14th International Conference on Informatics in Control, Automation and Robotics</conf-name>. <publisher-loc>Madrid, Spain</publisher-loc>: 
<publisher-name>SCITEPRESS - Science and Technology Publications</publisher-name><fpage>79</fpage>&#x2013;<lpage>87</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5220/0006434400790087</pub-id>
</mixed-citation>
</ref>
<ref id="B84">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Pradel</surname> <given-names>M.</given-names></name>
<name><surname>de Fays</surname> <given-names>M.</given-names></name>
<name><surname>SeGuineau</surname> <given-names>C.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Comparative Life Cycle Assessment of intra-row and inter-row weeding practices using autonomous robot systems in French vineyards</article-title>. <source>Sci. Total Environ.</source> <volume>838</volume>, <elocation-id>156441</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.scitotenv.2022.156441</pub-id>, PMID: <pub-id pub-id-type="pmid">35660576</pub-id>
</mixed-citation>
</ref>
<ref id="B85">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Pretto</surname> <given-names>A.</given-names></name>
<name><surname>Aravecchia</surname> <given-names>S.</given-names></name>
<name><surname>Burgard</surname> <given-names>W.</given-names></name>
<name><surname>Chebrolu</surname> <given-names>N.</given-names></name>
<name><surname>Dornhege</surname> <given-names>C.</given-names></name>
<name><surname>Falck</surname> <given-names>T.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). 
<article-title>Building an aerial&#x2013;ground robotics system for precision farming: an adaptable solution</article-title>. <source>IEEE Robotics Automation Magazine</source> <volume>28</volume>, <fpage>29</fpage>&#x2013;<lpage>49</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/MRA.2020.3012492</pub-id>
</mixed-citation>
</ref>
<ref id="B86">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Quan</surname> <given-names>L.</given-names></name>
<name><surname>Jiang</surname> <given-names>W.</given-names></name>
<name><surname>Li</surname> <given-names>H.</given-names></name>
<name><surname>Li</surname> <given-names>H.</given-names></name>
<name><surname>Wang</surname> <given-names>Q.</given-names></name>
<name><surname>Chen</surname> <given-names>L.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Intelligent intra-row robotic weeding system combining deep learning technology with a targeted weeding mode</article-title>. <source>Biosyst. Eng.</source> <volume>216</volume>, <fpage>13</fpage>&#x2013;<lpage>31</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2022.01.019</pub-id>
</mixed-citation>
</ref>
<ref id="B87">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rai</surname> <given-names>N.</given-names></name>
<name><surname>Sun</surname> <given-names>X.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>WeedVision: A single-stage deep learning architecture to perform weed detection and segmentation using drone-acquired images</article-title>. <source>Comput. Electron. Agric.</source> <volume>219</volume>, <elocation-id>108792</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.108792</pub-id>
</mixed-citation>
</ref>
<ref id="B88">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rai</surname> <given-names>N.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Ram</surname> <given-names>B. G.</given-names></name>
<name><surname>Schumacher</surname> <given-names>L.</given-names></name>
<name><surname>Yellavajjala</surname> <given-names>R. K.</given-names></name>
<name><surname>Bajwa</surname> <given-names>S.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Applications of deep learning in precision weed management: A review</article-title>. <source>Comput. Electron. Agric.</source> <volume>206</volume>, <elocation-id>107698</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.107698</pub-id>
</mixed-citation>
</ref>
<ref id="B89">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Raja</surname> <given-names>R.</given-names></name>
<name><surname>Nguyen</surname> <given-names>T. T.</given-names></name>
<name><surname>Vuong</surname> <given-names>V. L.</given-names></name>
<name><surname>Slaughter</surname> <given-names>D. C.</given-names></name>
<name><surname>Fennimore</surname> <given-names>S. A.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>RTD-SEPs: Real-time detection of stem emerging points and classification of crop-weed for robotic weed control in producing tomato</article-title>. <source>Biosyst. Eng.</source> <volume>195</volume>, <fpage>152</fpage>&#x2013;<lpage>171</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2020.05.004</pub-id>
</mixed-citation>
</ref>
<ref id="B90">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rajashekar</surname> <given-names>M.</given-names></name>
<name><surname>Heblikar</surname> <given-names>V.</given-names></name>
<name><surname>Kumar</surname> <given-names>S. M.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>Simulation and analysis of low cost weeder</article-title>. <source>Int. J. Res. Eng. Technol.</source> <volume>3</volume>, <fpage>543</fpage>&#x2013;<lpage>549</lpage>.
</mixed-citation>
</ref>
<ref id="B91">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Reiser</surname> <given-names>D.</given-names></name>
<name><surname>Sehsah</surname> <given-names>E.-S.</given-names></name>
<name><surname>Bumann</surname> <given-names>O.</given-names></name>
<name><surname>Morhard</surname> <given-names>J.</given-names></name>
<name><surname>Griepentrog</surname> <given-names>H. W.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Development of an autonomous electric robot implement for intra-row weeding in vineyards</article-title>. <source>Agriculture</source> <volume>9</volume>, <elocation-id>1</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture9010018</pub-id>
</mixed-citation>
</ref>
<ref id="B92">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Riemens</surname> <given-names>M.</given-names></name>
<name><surname>S&#xf8;nderskov</surname> <given-names>M.</given-names></name>
<name><surname>Moonen</surname> <given-names>A.-C.</given-names></name>
<name><surname>Storkey</surname> <given-names>J.</given-names></name>
<name><surname>Kudsk</surname> <given-names>P.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>An Integrated Weed Management framework: A pan-European perspective</article-title>. <source>Eur. J. Agron.</source> <volume>133</volume>, <elocation-id>126443</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.eja.2021.126443</pub-id>
</mixed-citation>
</ref>
<ref id="B93">
<mixed-citation publication-type="book">
<person-group person-group-type="author"><collab>Rigoulet</collab>
</person-group> (<year>2021</year>). <source>The importance of sensor fusion for autonomous vehicles</source> (
<publisher-name>Digital Nuage</publisher-name>). Available online at: <uri xlink:href="https://www.digitalnuage.com/the-importance-of-sensor-fusion-for-autonomous-vehicles">https://www.digitalnuage.com/the-importance-of-sensor-fusion-for-autonomous-vehicles</uri> (Accessed <date-in-citation content-type="access-date">January 21, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B94">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Rossmadl</surname> <given-names>A.</given-names></name>
<name><surname>Gandorfer</surname> <given-names>M.</given-names></name>
<name><surname>Kopfinger</surname> <given-names>S.</given-names></name>
<name><surname>Busboom</surname> <given-names>A.</given-names></name>
</person-group> (<year>2023</year>). &#x201c;
<article-title>Autonomous robotics in agriculture &#x2013; a preliminary techno-economic evaluation of a mechanical weeding system</article-title>,&#x201d; in <conf-name>ISR Europe 2023; 56th International Symposium on Robotics</conf-name>. <publisher-loc>Stuttgart, Germany</publisher-loc>: 
<publisher-name>VDE</publisher-name><fpage>405</fpage>&#x2013;<lpage>411</lpage>. Available online at: <uri xlink:href="https://ieeexplore.ieee.org/abstract/document/10363101">https://ieeexplore.ieee.org/abstract/document/10363101</uri> (Accessed <date-in-citation content-type="access-date">January 20, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B95">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rother</surname> <given-names>E. T.</given-names></name>
</person-group> (<year>2007</year>). 
<article-title>Systematic literature review X narrative review</article-title>. <source>Acta Paulista Enfermagem</source> <volume>20</volume>, <fpage>v</fpage>&#x2013;<lpage>vi</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1590/S0103-21002007000200001</pub-id>
</mixed-citation>
</ref>
<ref id="B96">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Rueda-Ayala</surname> <given-names>V.</given-names></name>
<name><surname>Rasmussen</surname> <given-names>J.</given-names></name>
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
</person-group> (<year>2010</year>). &#x201c;
<article-title>Mechanical weed control</article-title>,&#x201d; in <source>Precision crop protection&#x2014;The challenge and use of heterogeneity</source>. <publisher-loc>Dordrecht, Netherlands</publisher-loc>: 
<publisher-name>Springer</publisher-name>, <fpage>279</fpage>&#x2013;<lpage>294</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-90-481-9277-9_17</pub-id>
</mixed-citation>
</ref>
<ref id="B97">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Saber</surname> <given-names>M.</given-names></name>
<name><surname>Lee</surname> <given-names>W. S.</given-names></name>
<name><surname>Burks</surname> <given-names>T. F.</given-names></name>
<name><surname>Schueller</surname> <given-names>J. K.</given-names></name>
<name><surname>Chase</surname> <given-names>C. A.</given-names></name>
<name><surname>MacDonald</surname> <given-names>G. E.</given-names></name>
<etal/>
</person-group>. (<year>2015</year>). &#x201c;
<article-title>Performance and evaluation of intra-row weeder ultrasonic plant detection system and pinch-roller weeding mechanism for vegetable crops</article-title>,&#x201d; in <conf-name>2015 ASABE Annual International Meeting</conf-name>, Vol. <volume>1</volume>. <publisher-loc>Michigan, USA</publisher-loc>: 
<publisher-name>American Society of Agricultural and Biological Engineers</publisher-name>.
</mixed-citation>
</ref>
<ref id="B98">
<mixed-citation publication-type="web">
<person-group person-group-type="author"><collab>Saiwa</collab>
</person-group> (<year>2024</year>). 
<article-title>Weed Detection | Comprehensive overview</article-title>. Available online at: <uri xlink:href="https://saiwa.ai/blog/weed-detection/">https://saiwa.ai/blog/weed-detection/</uri> (Accessed <date-in-citation content-type="access-date">January 25, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B99">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Sanchez</surname> <given-names>J.</given-names></name>
<name><surname>Gallandt</surname> <given-names>E. R.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Functionality and efficacy of Franklin Robotics&#x2019; Tertill&#x2122; robotic weeder</article-title>. <source>Weed Technol.</source> <volume>35</volume>, <fpage>166</fpage>&#x2013;<lpage>170</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1017/wet.2020.94</pub-id>
</mixed-citation>
</ref>
<ref id="B100">
<mixed-citation publication-type="book">
<person-group person-group-type="author"><collab>Schneider Electric</collab>
</person-group> (<year>2022</year>). <source>All about ultrasonic sensors &amp; How they work</source> (
<publisher-name>Schneider Electric</publisher-name>). Available online at: <uri xlink:href="https://eshop.se.com/in/blog/post/all-about-ultrasonic-sensors-how-they-work.html">https://eshop.se.com/in/blog/post/all-about-ultrasonic-sensors-how-they-work.html</uri> (Accessed <date-in-citation content-type="access-date">January 12, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B101">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Shalal</surname> <given-names>N.</given-names></name>
<name><surname>Low</surname> <given-names>T.</given-names></name>
<name><surname>McCarthy</surname> <given-names>C.</given-names></name>
<name><surname>Hancock</surname> <given-names>N.</given-names></name>
</person-group> (<year>2013</year>). <source>A review of autonomous navigation systems in agricultural environments</source> (<publisher-loc>Barton, Australia</publisher-loc>: 
<publisher-name>Innovative Agricultural Technologies for a Sustainable Future</publisher-name>). Available online at: <uri xlink:href="https://research.usq.edu.au/item/q2456/a-review-of-autonomous-navigation-systems-in-agricultural-environments">https://research.usq.edu.au/item/q2456/a-review-of-autonomous-navigation-systems-in-agricultural-environments</uri> (Accessed <date-in-citation content-type="access-date">January 20, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B102">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shanmugam</surname> <given-names>M.</given-names></name>
<name><surname>Asokan</surname> <given-names>R.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>A machine-vision-based real-time sensor system to control weeds in agricultural fields</article-title>. <source>Sensor Lett.</source> <volume>13</volume>, <fpage>489</fpage>&#x2013;<lpage>495</lpage>.
</mixed-citation>
</ref>
<ref id="B103">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Sivesind</surname> <given-names>E. C.</given-names></name>
<name><surname>Leblanc</surname> <given-names>M. L.</given-names></name>
<name><surname>Cloutier</surname> <given-names>D. C.</given-names></name>
<name><surname>Seguin</surname> <given-names>P.</given-names></name>
<name><surname>Stewart</surname> <given-names>K. A.</given-names></name>
</person-group> (<year>2009</year>). 
<article-title>Weed response to flame weeding at different developmental stages</article-title>. <source>Weed Technol.</source> <volume>23</volume>, <fpage>438</fpage>&#x2013;<lpage>443</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1614/WT-08-155.1</pub-id>
</mixed-citation>
</ref>
<ref id="B104">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Spaeth</surname> <given-names>M.</given-names></name>
<name><surname>Saile</surname> <given-names>M.</given-names></name>
<name><surname>Riehle</surname> <given-names>D.</given-names></name>
<name><surname>Kirchhoff</surname> <given-names>C.</given-names></name>
<name><surname>Gerhards</surname> <given-names>R.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Development and evaluation of a sensor-based slope-compensation system for camera-guided hoeing in maize</article-title>. <source>Biosyst. Eng.</source> <volume>247</volume>, <fpage>91</fpage>&#x2013;<lpage>96</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2024.09.006</pub-id>
</mixed-citation>
</ref>
<ref id="B105">
<mixed-citation publication-type="web">
<person-group person-group-type="author">
<name><surname>Stearns</surname> <given-names>S.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Cultivation tools for mechanical weed control in vegetables | Integrated pest management</article-title>. Available online at: <uri xlink:href="https://ipm.cahnr.uconn.edu/cultivation-tools-for-mechanical-weed-control-in-vegetables/">https://ipm.cahnr.uconn.edu/cultivation-tools-for-mechanical-weed-control-in-vegetables/</uri> (Accessed <date-in-citation content-type="access-date">January 13, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B106">
<mixed-citation publication-type="book">
<person-group person-group-type="author"><collab>Stout</collab>
</person-group> (<year>2020</year>). <source>Mechanical weed control | Solutions organic weeding | Smart cultivator</source> (
<publisher-name>Unleash the Power of AI for Precision Weeding &amp; Cultivating</publisher-name>). Available online at: <uri xlink:href="https://www.stout.ai/">https://www.stout.ai/</uri> (Accessed <date-in-citation content-type="access-date">December 1, 2024</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B107">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Suhail</surname> <given-names>A.</given-names></name>
</person-group> (<year>2022</year>). <source>What are optical sensors used for</source> (
<publisher-name>AZoOptics</publisher-name>). Available online at: <uri xlink:href="https://www.azooptics.com/Article.aspx?ArticleID=2329">https://www.azooptics.com/Article.aspx?ArticleID=2329</uri> (Accessed <date-in-citation content-type="access-date">January 12, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B108">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tang</surname> <given-names>H.</given-names></name>
<name><surname>Xu</surname> <given-names>C.</given-names></name>
<name><surname>Wang</surname> <given-names>Q.</given-names></name>
<name><surname>Zhou</surname> <given-names>W.</given-names></name>
<name><surname>Wang</surname> <given-names>J.</given-names></name>
<name><surname>Xu</surname> <given-names>Y.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). 
<article-title>Analysis of the mechanism and performance optimization of burying weeding with a self-propelled inter-row weeder for paddy field environments</article-title>. <source>Appl. Sci.</source> <volume>11</volume>, <elocation-id>21</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/app11219798</pub-id>
</mixed-citation>
</ref>
<ref id="B109">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tillett</surname> <given-names>N. D.</given-names></name>
<name><surname>Hague</surname> <given-names>T.</given-names></name>
<name><surname>Grundy</surname> <given-names>A. C.</given-names></name>
<name><surname>Dedousis</surname> <given-names>A. P.</given-names></name>
</person-group> (<year>2008</year>). 
<article-title>Mechanical within-row weed control for transplanted crops using computer vision</article-title>. <source>Biosyst. Eng.</source> <volume>99</volume>, <fpage>171</fpage>&#x2013;<lpage>178</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2007.09.026</pub-id>
</mixed-citation>
</ref>
<ref id="B110">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Trajkovski</surname> <given-names>A.</given-names></name>
<name><surname>Bartolj</surname> <given-names>J.</given-names></name>
<name><surname>Levstek</surname> <given-names>T.</given-names></name>
<name><surname>Gode&#x161;a</surname> <given-names>T.</given-names></name>
<name><surname>Se&#x10d;nik</surname> <given-names>M.</given-names></name>
<name><surname>Ho&#x10d;evar</surname> <given-names>M.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Mechanical inter- and intra-row weed control for small-scale vegetable producers</article-title>. <source>Agriculture</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture14091483</pub-id>
</mixed-citation>
</ref>
<ref id="B111">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tshewang</surname> <given-names>S.</given-names></name>
<name><surname>Sindel</surname> <given-names>B. M.</given-names></name>
<name><surname>Ghimiray</surname> <given-names>M.</given-names></name>
<name><surname>Chauhan</surname> <given-names>B. S.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Weed management challenges in rice (<italic>Oryza sativa</italic> L.) for food security in Bhutan: A review</article-title>. <source>Crop Prot.</source> <volume>90</volume>, <fpage>117</fpage>&#x2013;<lpage>124</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cropro.2016.08.031</pub-id>
</mixed-citation>
</ref>
<ref id="B112">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Upadhyay</surname> <given-names>A.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Koparan</surname> <given-names>C.</given-names></name>
<name><surname>Rai</surname> <given-names>N.</given-names></name>
<name><surname>Howatt</surname> <given-names>K.</given-names></name>
<name><surname>Bajwa</surname> <given-names>S.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>b). 
<article-title>Advances in ground robotic technologies for site-specific weed management in precision agriculture: A review</article-title>. <source>Comput. Electron. Agric.</source> <volume>225</volume>, <elocation-id>109363</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.109363</pub-id>
</mixed-citation>
</ref>
<ref id="B113">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Upadhyay</surname> <given-names>A.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Koparan</surname> <given-names>C.</given-names></name>
<name><surname>Sun</surname> <given-names>X.</given-names></name>
</person-group> (<year>2024</year>a). 
<article-title>Development and evaluation of a machine vision and deep learning-based smart sprayer system for site-specific weed management in row crops: An edge computing approach</article-title>. <source>J. Agric. Food Res.</source> <volume>18</volume>, <elocation-id>101331</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jafr.2024.101331</pub-id>
</mixed-citation>
</ref>
<ref id="B114">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Van Bruggen</surname> <given-names>A. H. C.</given-names></name>
<name><surname>He</surname> <given-names>M. M.</given-names></name>
<name><surname>Shin</surname> <given-names>K.</given-names></name>
<name><surname>Mai</surname> <given-names>V.</given-names></name>
<name><surname>Jeong</surname> <given-names>K. C.</given-names></name>
<name><surname>Finckh</surname> <given-names>M. R.</given-names></name>
<etal/>
</person-group>. (<year>2018</year>). 
<article-title>Environmental and health effects of the herbicide glyphosate</article-title>. <source>Sci. Total Environ.</source> <volume>616&#x2013;617</volume>, <fpage>255</fpage>&#x2013;<lpage>268</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.scitotenv.2017.10.309</pub-id>, PMID: <pub-id pub-id-type="pmid">29117584</pub-id>
</mixed-citation>
</ref>
<ref id="B115">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>van der Schans</surname> <given-names>D. A.</given-names></name>
<name><surname>Bleeker</surname> <given-names>P. O.</given-names></name>
<name><surname>Molendijk</surname> <given-names>L. P. G.</given-names></name>
<name><surname>Plentinger</surname> <given-names>M. C.</given-names></name>
<name><surname>van der Weide</surname> <given-names>R. Y.</given-names></name>
<name><surname>Lotz</surname> <given-names>L.</given-names></name>
<etal/>
</person-group>. (<year>2006</year>). <source><italic>Practical weed control in arable farming and outdoor vegetable cultivation without chemicals</italic> (No. 352; p.)</source> (
<publisher-name>Applied Plant Research</publisher-name>). Available online at: <uri xlink:href="https://library.wur.nl/WebQuery/wurpubs/346614">https://library.wur.nl/WebQuery/wurpubs/346614</uri> (Accessed <date-in-citation content-type="access-date">January 5, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B116">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Velasquez</surname> <given-names>A. E. B.</given-names></name>
<name><surname>Higuti</surname> <given-names>V. A. H.</given-names></name>
<name><surname>Gasparino</surname> <given-names>M. V.</given-names></name>
<name><surname>Sivakumar</surname> <given-names>A. N.</given-names></name>
<name><surname>Becker</surname> <given-names>M.</given-names></name>
<name><surname>Chowdhary</surname> <given-names>G.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Multi-sensor fusion based robust row following for compact agricultural robots</article-title>. <source>Field Robotics</source> <volume>2</volume>, <fpage>1291</fpage>&#x2013;<lpage>1319</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.55417/fr.2022043</pub-id>
</mixed-citation>
</ref>
<ref id="B117">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Visentin</surname> <given-names>F.</given-names></name>
<name><surname>Cremasco</surname> <given-names>S.</given-names></name>
<name><surname>Sozzi</surname> <given-names>M.</given-names></name>
<name><surname>Signorini</surname> <given-names>L.</given-names></name>
<name><surname>Signorini</surname> <given-names>M.</given-names></name>
<name><surname>Marinello</surname> <given-names>F.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>A mixed-autonomous robotic platform for intra-row and inter-row weed removal for precision agriculture</article-title>. <source>Comput. Electron. Agric.</source> <volume>214</volume>, <elocation-id>108270</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.108270</pub-id>
</mixed-citation>
</ref>
<ref id="B118">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Vit</surname> <given-names>A.</given-names></name>
<name><surname>Shani</surname> <given-names>G.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Comparing RGB-D sensors for close range outdoor agricultural phenotyping</article-title>. <source>Sensors (Basel, Switzerland)</source> <volume>18</volume>, <elocation-id>4413</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s18124413</pub-id>, PMID: <pub-id pub-id-type="pmid">30551636</pub-id>
</mixed-citation>
</ref>
<ref id="B119">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>Y.</given-names></name>
<name><surname>Ye</surname> <given-names>Y.</given-names></name>
<name><surname>Wu</surname> <given-names>H.</given-names></name>
<name><surname>Tao</surname> <given-names>K.</given-names></name>
<name><surname>Qian</surname> <given-names>M.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>In different weed distributions, the dynamic coverage algorithm for mechanical selective weeding robot</article-title>. <source>Comput. Electron. Agric.</source> <volume>226</volume>, <elocation-id>109486</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.109486</pub-id>
</mixed-citation>
</ref>
<ref id="B120">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>N.</given-names></name>
<name><surname>Zhang</surname> <given-names>N.</given-names></name>
<name><surname>Dowell</surname> <given-names>F. E.</given-names></name>
<name><surname>Sun</surname> <given-names>Y.</given-names></name>
<name><surname>Peterson</surname> <given-names>D. E.</given-names></name>
</person-group> (<year>2001</year>). 
<article-title>Design of an optical weed sensor using plant spectral characteristics</article-title>. <source>Trans. ASAE</source> <volume>44</volume>, <fpage>409</fpage>.
</mixed-citation>
</ref>
<ref id="B121">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>A.</given-names></name>
<name><surname>Zhang</surname> <given-names>W.</given-names></name>
<name><surname>Wei</surname> <given-names>X.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>A review on weed detection using ground-based machine vision and image processing techniques</article-title>. <source>Comput. Electron. Agric.</source> <volume>158</volume>, <fpage>226</fpage>&#x2013;<lpage>240</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2019.02.005</pub-id>
</mixed-citation>
</ref>
<ref id="B122">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>N.</given-names></name>
<name><surname>Zhang</surname> <given-names>N.</given-names></name>
<name><surname>Wei</surname> <given-names>J.</given-names></name>
<name><surname>Stoll</surname> <given-names>Q.</given-names></name>
<name><surname>Peterson</surname> <given-names>D. E.</given-names></name>
</person-group> (<year>2007</year>). 
<article-title>A real-time, embedded, weed-detection system for use in wheat fields</article-title>. <source>Biosyst. Eng.</source> <volume>98</volume>, <fpage>276</fpage>&#x2013;<lpage>285</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2007.08.007</pub-id>
</mixed-citation>
</ref>
<ref id="B123">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wu</surname> <given-names>X.</given-names></name>
<name><surname>Aravecchia</surname> <given-names>S.</given-names></name>
<name><surname>Lottes</surname> <given-names>P.</given-names></name>
<name><surname>Stachniss</surname> <given-names>C.</given-names></name>
<name><surname>Pradalier</surname> <given-names>C.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Robotic weed control using automated weed and crop classification</article-title>. <source>J. Field Robotics</source> <volume>37</volume>, <fpage>322</fpage>&#x2013;<lpage>340</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/rob.21938</pub-id>
</mixed-citation>
</ref>
<ref id="B124">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xiang</surname> <given-names>M.</given-names></name>
<name><surname>Qu</surname> <given-names>M.</given-names></name>
<name><surname>Wang</surname> <given-names>G.</given-names></name>
<name><surname>Ma</surname> <given-names>Z.</given-names></name>
<name><surname>Chen</surname> <given-names>X.</given-names></name>
<name><surname>Zhou</surname> <given-names>Z.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Crop detection technologies, mechanical weeding executive parts and working performance of intelligent mechanical weeding: A review</article-title>. <source>Front. Plant Sci.</source> <volume>15</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2024.1361002</pub-id>, PMID: <pub-id pub-id-type="pmid">38550283</pub-id>
</mixed-citation>
</ref>
<ref id="B125">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yao</surname> <given-names>Z.</given-names></name>
<name><surname>Zhao</surname> <given-names>C.</given-names></name>
<name><surname>Zhang</surname> <given-names>T.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Agricultural machinery automatic navigation technology</article-title>. <source>iScience</source> <volume>27</volume>, <elocation-id>108714</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isci.2023.108714</pub-id>, PMID: <pub-id pub-id-type="pmid">38292432</pub-id>
</mixed-citation>
</ref>
<ref id="B126">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ye</surname> <given-names>S.</given-names></name>
<name><surname>Xue</surname> <given-names>X.</given-names></name>
<name><surname>Si</surname> <given-names>S.</given-names></name>
<name><surname>Xu</surname> <given-names>Y.</given-names></name>
<name><surname>Le</surname> <given-names>F.</given-names></name>
<name><surname>Cui</surname> <given-names>L.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Design and testing of an elastic comb reciprocating a soybean plant-to-plant seedling avoidance and weeding device</article-title>. <source>Agriculture</source> <volume>13</volume>, <elocation-id>11</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture13112157</pub-id>
</mixed-citation>
</ref>
<ref id="B127">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yu</surname> <given-names>J.</given-names></name>
<name><surname>Sharpe</surname> <given-names>S. M.</given-names></name>
<name><surname>Boyd</surname> <given-names>N. S.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Fumigants alone or in combination with herbicide for weed management in bell pepper (<italic>Capsicum annuum</italic>)</article-title>. <source>Crop Prot.</source> <volume>118</volume>, <fpage>31</fpage>&#x2013;<lpage>35</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cropro.2018.12.010</pub-id>
</mixed-citation>
</ref>
<ref id="B128">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zawada</surname> <given-names>M.</given-names></name>
<name><surname>Legutko</surname> <given-names>S.</given-names></name>
<name><surname>Go&#x15b;cia&#x144;ska-&#x141;owi&#x144;ska</surname> <given-names>J.</given-names></name>
<name><surname>Szymczyk</surname> <given-names>S.</given-names></name>
<name><surname>Nijak</surname> <given-names>M.</given-names></name>
<name><surname>Wojciechowski</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Mechanical weed control systems: methods and effectiveness</article-title>. <source>Sustainability</source> <volume>15</volume>, <elocation-id>21</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/su152115206</pub-id>
</mixed-citation>
</ref>
<ref id="B129">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhao</surname> <given-names>C.-T.</given-names></name>
<name><surname>Wang</surname> <given-names>R.-F.</given-names></name>
<name><surname>Tu</surname> <given-names>Y.-H.</given-names></name>
<name><surname>Pang</surname> <given-names>X.-X.</given-names></name>
<name><surname>Su</surname> <given-names>W.-H.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Automatic lettuce weed detection and classification based on optimized convolutional neural networks for robotic weed control</article-title>. <source>Agronomy</source> <volume>14</volume>, <elocation-id>12</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14122838</pub-id>
</mixed-citation>
</ref>
<ref id="B130">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Zimdahl</surname> <given-names>R. L.</given-names></name>
</person-group> (<year>2018</year>). &#x201c;
<article-title>Chapter 10&#x2014;Methods of weed management</article-title>,&#x201d; in <source>Fundamentals of weed science</source>, <edition>Fifth Edition</edition>. Ed. 
<person-group person-group-type="editor">
<name><surname>Zimdahl</surname> <given-names>R. L.</given-names></name>
</person-group> (
<publisher-name>Academic Press</publisher-name>), <fpage>271</fpage>&#x2013;<lpage>335</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/B978-0-12-811143-7.00010-X</pub-id>
</mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn id="n1" fn-type="custom" custom-type="edited-by">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1107972">Ning Yang</ext-link>, Jiangsu University, China</p></fn>
<fn id="n2" fn-type="custom" custom-type="reviewed-by">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3267697">Shenyu Zheng</ext-link>, Jiangsu University, China</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3271362">Jinkang Jiao</ext-link>, Agricultural Equipment Institute of Hunan, China</p></fn>
</fn-group>
</back>
</article>