<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="review-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2025.1637241</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Plant Science</subject>
<subj-group>
<subject>Review</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>A review of plant leaf disease identification by deep learning algorithms</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Zhao</surname>
<given-names>Junmin</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/3083143/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Xu</surname>
<given-names>Laixiang</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Ma</surname>
<given-names>Zizhen</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Li</surname>
<given-names>Juncai</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Wang</surname>
<given-names>Xiaowei</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Liu</surname>
<given-names>Yunchang</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Du</surname>
<given-names>Xiaojie</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>School of Computer and Data Science, Research Center of Smart City and Big Data Engineering of Henan Province, Henan University of Urban Construction</institution>, <addr-line>Pingdingshan</addr-line>,&#xa0;<country>China</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>School of Computer and Data Science, Research Center of Smart City and Big Data Engineering of Henan Province, Innovation Laboratory of Smart Transportation and Big Data Development of Henan Province, Henan University of Urban Construction</institution>, <addr-line>Pingdingshan</addr-line>,&#xa0;<country>China</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>School of Computer and Data Science, Henan University of Urban Construction</institution>, <addr-line>Pingdingshan</addr-line>,&#xa0;<country>China</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited by: Alejandro Isabel Luna-Maldonado, Autonomous University of Nuevo Le&#xf3;n, Mexico</p>
</fn>
<fn fn-type="edited-by">
<p>Reviewed by: Wajahat Akbar, Chang&#x2019;an University, China</p>
<p>Burhan Duman, Isparta University of Applied Sciences, T&#xfc;rkiye</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Junmin Zhao, <email xlink:href="mailto:zhaojunminhuuc@yeah.net">zhaojunminhuuc@yeah.net</email>
</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>20</day>
<month>08</month>
<year>2025</year>
</pub-date>
<pub-date pub-type="collection">
<year>2025</year>
</pub-date>
<volume>16</volume>
<elocation-id>1637241</elocation-id>
<history>
<date date-type="received">
<day>29</day>
<month>05</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>28</day>
<month>07</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2025 Zhao, Xu, Ma, Li, Wang, Liu and Du.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Zhao, Xu, Ma, Li, Wang, Liu and Du</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>Plant leaf disease control is crucial given the worldwide prevalence of plant leaf diseases. The most crucial aspect of controlling plant leaf diseases is identifying them correctly. Deep learning-based plant leaf disease recognition is a viable alternative to manual methods, which are inefficient and inaccurate. The proposed work aims to compile plant leaf disease datasets from various countries, review current research and progress in deep learning algorithms for plant disease recognition, and explain how different types of data are developed and used in this area by different deep learning networks. The feasibility of several network models for deep learning-based plant leaf disease detection is discussed. Addressing challenges such as variable sunlight in field conditions, similar symptoms among different plant leaf diseases, and varied symptoms of the same disease at different damage periods or infection degrees are all essential research topics for the growth of this discipline. To address the concerns raised above and establish the field&#x2019;s future development potential, we must research high-performance neural networks based on the benefits and drawbacks of diverse networks. The proposed work can serve as a foundation for future research and breakthroughs in the identification of plant leaf diseases.</p>
</abstract>
<kwd-group>
<kwd>plant disease control</kwd>
<kwd>plant leaf disease</kwd>
<kwd>deep learning</kwd>
<kwd>disease identification</kwd>
<kwd>convolutional neural network</kwd>
</kwd-group>
<counts>
<fig-count count="8"/>
<table-count count="6"/>
<equation-count count="0"/>
<ref-count count="143"/>
<page-count count="28"/>
<word-count count="14173"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-in-acceptance</meta-name>
<meta-value>Technical Advances in Plant Science</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>The ongoing strengthening of the greenhouse effect raises the Earth&#x2019;s temperature and alters precipitation patterns. These factors significantly influence the occurrence, distribution, and transmission of plant leaf diseases. Pathogens and pests reproduce more quickly when temperatures rise (<xref ref-type="bibr" rid="B20">Chinnu et&#xa0;al., 2024</xref>), leading to the rapid spread of pests and diseases. Changes in precipitation patterns can lead to root damage or drought, lowering plant disease resistance. Plants are essential for managing the Earth&#x2019;s climate. Plant diseases can worsen climate change, reduce crop quality and output, cause food shortages, and ultimately jeopardize human existence. Plant diseases can damage plant tissues directly in a variety of ways, compromising plant aesthetics and market value; in severe cases, they can kill large numbers of plants. Early manual identification of plant diseases had numerous limitations: farmers have limited expertise and difficulty assessing the severity of disease progression, which results in ineffective control and excessive pesticide spraying, wasting resources and polluting the environment (<xref ref-type="bibr" rid="B106">Strandberg et&#xa0;al., 2024</xref>). As a result, promptly, conveniently, and simply recognizing plant leaf diseases and improving plant disease control are critical for ensuring crop quality and yield, solving food concerns, sustaining human living conditions, and safeguarding the environment. This is also a significant motivation for studying this subject.</p>
<p>According to the relevant literature, there are many kinds of plant diseases. Plant diseases damage plant tissues directly through parasitism, absorption of nutrients, and destruction of cell structure, producing symptoms such as leaf spots, rot, fruit deformation, and fruit cracking. These damages not only affect the appearance of plants but may also reduce their market value; in more serious cases, they can cause large numbers of plant deaths (<xref ref-type="bibr" rid="B95">Roeswitawati et&#xa0;al., 2024</xref>). Accurate detection of plant leaf diseases is an important step in plant disease management. Owing to the limitations of early technology, plant disease identification has relied mainly on manual inspection; however, because of the diversity and similarity of diseases and the limited knowledge of some farmers, the degree of disease development is often judged incorrectly, the right treatment is not applied, and disease progression becomes difficult to control. In addition, large quantities of pesticides are sprayed during treatment, which not only wastes resources but also causes environmental pollution and other problems (<xref ref-type="bibr" rid="B2">Abaineh et&#xa0;al., 2024</xref>). As a result, speedy, convenient, and simple detection of plant leaf diseases is critical to plant leaf disease control, as well as to the field&#x2019;s future development (<xref ref-type="bibr" rid="B48">Kalimuthu et&#xa0;al., 2021</xref>).</p>
<p>Although deep learning has achieved some results in identifying plant leaf diseases, it still faces numerous obstacles. Open questions include how to improve the model&#x2019;s generalization across multiple contexts and disease types while better adapting to practical application scenarios. In plant leaf disease detection, deep learning also suffers from imbalanced data and erroneous labeling, which reduce the model&#x2019;s reliability and stability. Deep learning models that balance accuracy and computational resource consumption will be able to recognize plant diseases more efficiently on resource-constrained devices.</p>
<p>The proposed work discusses plant leaf disease recognition strategies based on deep learning algorithms. Although deep learning did not initially attract widespread attention because of hardware constraints, its potential gradually emerged. <xref ref-type="bibr" rid="B53">Krizhevsky et&#xa0;al. (2012)</xref> successfully implemented the well-known convolutional neural network AlexNet on graphics processors. In the 2012 ImageNet Large-Scale Visual Recognition Challenge, AlexNet achieved a breakthrough in classification accuracy, far surpassing earlier computer vision methods. This accomplishment not only demonstrated deep learning&#x2019;s formidable potential but also encouraged the growth of diverse convolutional neural networks (CNNs), quickening the field&#x2019;s progress. Deep learning is being used more and more in plant leaf disease identification, and its significance is becoming increasingly apparent.</p>
<p>Deep learning is a branch of machine learning (<xref ref-type="bibr" rid="B33">Ganaie et&#xa0;al., 2022</xref>) that is based on artificial neural networks and learns complex data features through multiple hidden layers. Deep learning is the core technology for realizing artificial intelligence, and it has produced significant breakthroughs in image processing, speech recognition, natural language processing, and other fields (<xref ref-type="bibr" rid="B80">Matteo and J. Brendan, 2024</xref>). Deep learning models can automatically extract useful features from raw data, greatly reducing the burden of manual feature engineering. A deep learning model trained on a large number of data points can learn the inherent patterns of the data and thus achieve high predictive ability even on previously unseen data. Deep learning models can learn high-level, abstract data features, providing powerful representation capabilities (<xref ref-type="bibr" rid="B96">Rose and Rui, 2024</xref>). The subject of plant disease recognition has grown significantly with the introduction of deep learning, and a number of reliable research outcomes have emerged. Plant disease detection based on deep learning models has also become a hot spot in this field. Our work reviews the research and progress in plant leaf disease recognition based on deep learning models, as well as the challenges and opportunities faced by the field in recent years.</p>
<p>The suggested study examines the research and development of deep learning in plant leaf disease recognition. We present a thorough assessment of deep learning applications in plant disease recognition, taking recent research achievements into account. At the same time, we review the obstacles encountered in the field of plant disease diagnosis and investigate potential solutions, which establishes a clear path and reference for future study. By summarizing and analyzing existing research findings, it is possible to encourage the further development of plant leaf disease identification technology, assist in solving practical problems in plant disease control, and steer the field toward a more efficient, accurate, and practical direction.</p>
<p>We investigated convolutional neural networks, YOLO, structural optimization, and performance improvement strategies for lightweight models in disease recognition applications, and we examined the practical efficiency of several algorithms in difficult conditions, covering accuracy, real-time performance, and the simultaneous detection of multiple diseases.</p>
<p>We mainly used the Web of Science, Institute of Electrical and Electronics Engineers (IEEE) Xplore, ScienceDirect, SpringerLink, and Google Scholar academic databases, focusing on literature from 2018 to 2025, to systematically review and integrate the research progress of deep learning in plant leaf disease recognition. The keywords we used are plant leaf disease, crop disease, deep learning, convolutional neural network, object detection, recognition, detection, and classification. Our inclusion criteria were deep learning-based leaf disease detection research and academic papers using public or self-built field datasets; we excluded non-deep learning methods, purely theoretical models without image data, and research on non-leaf diseases such as root diseases. We acquired 580 papers after initial title and abstract screening and retained 210 following full-text reading; we then added 35 relevant articles through reference tracing. At the same time, we supplemented the review with more than 20 prominent open-source datasets found by searching Kaggle, GitHub, and the official websites of academic institutions. We ultimately reviewed and integrated over 140 pieces of literature on algorithm creation, lightweight models, and multi-scenario applications.</p>
<p>The rest of the article is structured as follows: Section 2 offers a thorough analysis of each dataset and a thorough introduction to open-source databases both domestically and abroad. Plant species, sample size, and disease kinds are all included. Section 3 examines the specific performance and impacts of various implementation frameworks and provides an overview of plant leaf disease recognition algorithms based on one-stage detectors, two-stage detectors, and anchor-free frameworks. Section 4 examines the performance of alternative models and lightweight models under deep learning techniques. Section 5 concludes with a thorough analysis and forecast.</p>
</sec>
<sec id="s2">
<label>2</label>
<title>Plant leaf disease image database</title>
<p>Deep learning models, trained on large datasets, can learn the inherent patterns of data at a deeper level and achieve higher accuracy when predicting new, unseen data (<xref ref-type="bibr" rid="B83">Mingle et&#xa0;al., 2024</xref>). Datasets are therefore the basis for building deep models, and larger, higher-quality datasets tend to produce more successful deep learning models (<xref ref-type="bibr" rid="B5">Alsaghir et&#xa0;al., 2022</xref>). A high-quality deep learning model can identify the type of plant disease, the degree of disease, and other information more accurately and effectively. Practitioners can then apply the appropriate treatment based on the model&#x2019;s results, leading to more scientific and effective control. <xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref> lists publicly available, high-quality plant disease image databases.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Publicly available plant disease image data sets and websites.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Dataset</th>
<th valign="top" align="left">No. of Images</th>
<th valign="top" align="left">Disease Annotation</th>
<th valign="top" align="left">Source Organization</th>
<th valign="top" align="left">Datasets Links</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">PlantVillage</td>
<td valign="top" align="left">54,036</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Penn State University</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://plantvillage.psu.edu/">https://plantvillage.psu.edu/</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">CVPR 2020-FGVG7</td>
<td valign="top" align="left">3651</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Plant Pathology Challenge competition</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/c/plant-pathology-2020-fgvc7/data">https://www.kaggle.com/c/plant-pathology-2020-fgvc7/data</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Cucumber Plant Diseases Dataset</td>
<td valign="top" align="left">695</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Hefei Institute of Intelligent Machinery, Chinese Academy of Sciences</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/kareem3egm/cucumber-plant-diseases-dataset">https://www.kaggle.com/kareem3egm/cucumber-plant-diseases-dataset</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">New Plant Disease Dataset</td>
<td valign="top" align="left">87000</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Colorado State University</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/vipoooool/new-plant-diseases-dataset">https://www.kaggle.com/vipoooool/new-plant-diseases-dataset</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">PlantDoc</td>
<td valign="top" align="left">2598</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Indian Institute of Technology</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://github.com/pratikkayal/PlantDoc-Dataset">https://github.com/pratikkayal/PlantDoc-Dataset</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Rice Diseases Image Dataset</td>
<td valign="top" align="left">5447</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">jonathando.dev</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/minhhuy2810/rice-diseases-image-dataset">https://www.kaggle.com/minhhuy2810/rice-diseases-image-dataset</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">PlantPathology Apple Dataset</td>
<td valign="top" align="left">3171</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Plant Pathology Challenge competition</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/lextoumbourou/plantvillageapplecolor">https://www.kaggle.com/lextoumbourou/plantvillageapplecolor</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">New Plant Diseases Dataset (Augmented)</td>
<td valign="top" align="left">22900</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Penn State University</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/noulam/tomato">https://www.kaggle.com/noulam/tomato</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">PlantifyDr Dataset</td>
<td valign="top" align="left">125000</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Indian Institute of Technology</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/lavaman151/plantifydr-dataset">https://www.kaggle.com/lavaman151/plantifydr-dataset</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Corn Leaf Diseases(NLB)</td>
<td valign="top" align="left">4115</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Kathmandu University</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/rabbityashow/corn-leaf-diseasesnlb">https://www.kaggle.com/rabbityashow/corn-leaf-diseasesnlb</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Durian Leaf Disease Dataset</td>
<td valign="top" align="left">4437</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Ton Duc Thang<break/>University</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/cthng123/durian-leaf-disease-dataset">https://www.kaggle.com/datasets/cthng123/durian-leaf-disease-dataset</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Banana Leaf Spot Diseases Dataset</td>
<td valign="top" align="left">5909</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Bangabandhu Sheikh Mujibur Rahman Agricultural University</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/shifatearman/bananalsd">https://www.kaggle.com/datasets/shifatearman/bananalsd</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Jute Leaf Disease Dataset</td>
<td valign="top" align="left">1820</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Varendra University</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/srkuhin/jute-leaf-disease-detection">https://www.kaggle.com/datasets/srkuhin/jute-leaf-disease-detection</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Potato Disease Leaf Dataset (PLD)</td>
<td valign="top" align="left">4062</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">eFAIDA TECHNOLOGIES, University of Okara</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/rizwan123456789/potato-disease-leaf-datasetpld">https://www.kaggle.com/datasets/rizwan123456789/potato-disease-leaf-datasetpld</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Potato Leaf (Healthy and Late Blight)</td>
<td valign="top" align="left">426</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Software Engineer at Velou</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/nirmalsankalana/potato-leaf-healthy-and-late-blight">https://www.kaggle.com/datasets/nirmalsankalana/potato-leaf-healthy-and-late-blight</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Eggplant Disease Recognition Dataset</td>
<td valign="top" align="left">3136</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Marketing Manager at Jazira &amp; Victoria Ltd</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/kamalmoha/eggplant-disease-recognition-dataset">https://www.kaggle.com/datasets/kamalmoha/eggplant-disease-recognition-dataset</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Strawberry Disease Detection Dataset</td>
<td valign="top" align="left">2500</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Jeonbuk National University</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/usmanafzaal/strawberry-disease-detection-dataset?select=train">https://www.kaggle.com/datasets/usmanafzaal/strawberry-disease-detection-dataset?select=train</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">PDD271</td>
<td valign="top" align="left">220592</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Beijing Puhui Sannong Technology Co. Ltd</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://drive.google.com/file/d/1QMR1bUfEuMbZz-Mb3u2IXdbMgz7oj2Pe/view">https://drive.google.com/file/d/1QMR1bUfEuMbZz-Mb3u2IXdbMgz7oj2Pe/view</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Agricultural Disease and Pest</td>
<td valign="top" align="left">100000</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Hefei Institute of Physical Sciences, Chinese Academy of Sciences</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="http://www.icgroupcas.cn/website_bchtk/index.html">http://www.icgroupcas.cn/website_bchtk/index.html</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Rice Leaf Diseases Data SetCotton plant</td>
<td valign="top" align="left">120</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Dharmsinh Desai University</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://archive.ics.uci.edu/ml/datasets/Rice+Leaf+Diseases">https://archive.ics.uci.edu/ml/datasets/Rice+Leaf+Diseases</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Cotton plant diseases</td>
<td valign="top" align="left">234</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Personal</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.datacastle.cn/dataset_description.html?type=dataset&amp;id=2465">https://www.datacastle.cn/dataset_description.html?type=dataset&amp;id=2465</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Plant disease recognition dataset</td>
<td valign="top" align="left">1530</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">University of Asia Pacific</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/rashikrahmanpritom/plant-disease-recognition-dataset">https://www.kaggle.com/rashikrahmanpritom/plant-disease-recognition-dataset</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">Sugarcane Plant Diseases Dataset</td>
<td valign="top" align="left">19926</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">University of Washington</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/akilesh253/sugarcane-plant-diseases-dataset">https://www.kaggle.com/datasets/akilesh253/sugarcane-plant-diseases-dataset</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">soybean.leaf.dataset</td>
<td valign="top" align="left">6410</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Universidade do Estado de Mato Grosso</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/maeloisamignoni/soybeanleafdataset">https://www.kaggle.com/datasets/maeloisamignoni/soybeanleafdataset</ext-link>
</td>
</tr>
<tr>
<td valign="top" align="left">PDDB</td>
<td valign="top" align="left">50000</td>
<td valign="top" align="left">Expert</td>
<td valign="top" align="left">Embrapa CNPTIA</td>
<td valign="top" align="left">
<ext-link ext-link-type="uri" xlink:href="https://www.digipathos-rep.cnptia.embrapa.br/jspui/">https://www.digipathos-rep.cnptia.embrapa.br/jspui/</ext-link>
</td>
</tr>
</tbody>
</table>
</table-wrap>
<sec id="s2_1">
<label>2.1</label>
<title>Major plant leaf disease image databases outside China</title>
<list list-type="order">
<list-item>
<p>PlantVillage. This public dataset was created jointly by David Hughes and Marcel Salath&#xe9; and is one of the most widely used databases today. PlantVillage includes 14 plants and 26 diseases, for a total of 38 categories and 54,036 images, making it very suitable for training plant disease detection and recognition models. However, most of the images in PlantVillage were taken in the laboratory or against a single background, and few were taken under complex natural conditions.</p>
</list-item>
<list-item>
<p>Plant pathology 2020-FGVC7. This dataset consists mainly of high-quality annotated apple images, including apple scab, apple rust, multiple coexisting diseases, and healthy leaves, totaling 3651 images: 1200 images of apple scab, 1399 images of apple rust, 187 images with multiple coexisting diseases, and 865 images of healthy leaves.</p>
</list-item>
<list-item>
<p>Cucumber plant diseases dataset. The dataset, shared by Karim Negm, contains 695 images of diseased and healthy cucumbers taken under natural conditions in the field.</p>
</list-item>
<list-item>
<p>New plant disease dataset. This dataset was recreated by Samir Bhattarai using data augmentation techniques. It consists of 38 different categories, including 87,000 images of healthy and unhealthy leaves, but the images are taken against a single background.</p>
</list-item>
<list-item>
<p>PlantDoc. Created by Singh et&#xa0;al (<xref ref-type="bibr" rid="B103">Singh et&#xa0;al., 2020</xref>), it contains 2598 images covering 13 plant species and 17 disease types.</p>
</list-item>
<list-item>
<p>Rice diseases image dataset. This dataset covers brown spot disease, leaf spot disease, ironworm disease, and healthy leaves, for a total of 5447 images.</p>
</list-item>
<list-item>
<p>Plant pathology apple dataset. Derived from PlantVillage, it covers apple scab, black rot, cedar apple rust, and healthy leaves, containing 3171 images.</p>
</list-item>
<list-item>
<p>New plant diseases dataset (Augmented). This is a tomato dataset derived from the PlantVillage data through data augmentation techniques, covering 9 tomato diseases and 1 healthy leaf class, for a total of 22,900 images.</p>
</list-item>
<list-item>
<p>PlantifyDr dataset. The dataset covers 10 plant types, namely apples, bell peppers, cherries, oranges, corn, grapes, peaches, potatoes, strawberries, and tomatoes, with 125,000 images spanning 37 plant disease types.</p>
</list-item>
<list-item>
<p>Plant disease recognition dataset. The dataset contains 1530 images in three classes: healthy, powdery mildew, and rust.</p>
</list-item>
<list-item>
<p>Corn leaf diseases (NLB). The dataset contains 4115 images, including both diseased (northern leaf blight) and healthy corn leaves.</p>
</list-item>
<list-item>
<p>Durian leaf disease dataset. The dataset includes images of different types of durian leaf disease, including algal blotch, heteroscab, leaf blight, and phomopsis leaf blotch, as well as images of healthy leaves, for a total of 4437 images.</p>
</list-item>
<list-item>
<p>Banana leaf spot diseases dataset. The dataset covers three major banana leaf spot diseases, Sigatoka, kodana, and polychaete, as well as images of healthy banana leaves, and consists of two subsets. The original set contains 937 images, divided into 4 categories and provided in JPG format; these are the most original image data and visually show the various states of banana leaves. The enhanced set extends the original set with a series of augmentation techniques, including Gaussian blur, horizontal flipping, cropping, linear contrast adjustment, translation, and rotation (a minimal loading and augmentation sketch illustrating such operations follows this list). Through these operations, 400 images are added to each class, bringing the total number of images in the enhanced set to 1600.</p>
</list-item>
<list-item>
<p>Jute leaf disease dataset. This comprehensive dataset for jute crop disease detection contains 609 images of cercospora leaf spot and 647 images of golden mosaic.</p>
</list-item>
<list-item>
<p>Potato disease leaf dataset (PLD). The Potato Leaf Disease dataset contains 4062 images collected from the Punjab region of central Pakistan.</p>
</list-item>
<list-item>
<p>Potato leaf (Healthy and Late Blight). This heterogeneous image dataset was collected from potato farms in Holeta, Ethiopia, with the help of two plant pathologists. The dataset is labeled with two classes, &#x201c;Healthy&#x201d; and &#x201c;Late Blight,&#x201d; and the images are diverse: some were captured against relatively clean backgrounds, while others were captured in highly noisy environments. 63 images were collected under the &#x201c;Late Blight&#x201d; category and 363 images under the &#x201c;Healthy&#x201d; category. The resulting dataset can be used for different studies aimed at plant disease detection and classification.</p>
</list-item>
<list-item>
<p>Eggplant disease recognition dataset. This dataset includes seven eggplant leaf classes: healthy leaves, pest damage, leaf spot disease, mosaic virus disease, small leaf disease, white mold, and blight. Starting from 392 original eggplant images, various image processing techniques were applied to augment the data, yielding a total of 3136 enhanced photos.</p>
</list-item>
<list-item>
<p>Strawberry disease detection dataset. It includes 2500 images and provides segmentation annotation files for 7 different types of strawberry diseases.</p>
</list-item>
<list-item>
<p>Sugarcane plant diseases dataset. The dataset contains 19926 images of sugarcane leaves. Among them, 4800 were bacterial blight, 3132 healthy leaves, 2772 mosaic leaves, 3108 red rot, 3048 rust, and 3030 yellow rot.</p>
</list-item>
<list-item>
<p>soybean.leaf.dataset. The dataset consists of three image folders, Caterpillar (3309 images), Diabrotica speciosa (2205 images), and Healthy (896 images), for a total of 6410 images.</p>
</list-item>
</list>
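<p>To make the practical use of such folder-per-class image datasets concrete, the following minimal sketch (not taken from any of the dataset authors&#x2019; code) shows how a dataset organized like PlantVillage can be loaded with torchvision and how augmentation operations of the kind described above (Gaussian blur, horizontal flipping, cropping, contrast adjustment, rotation) are typically applied; the paths and parameter values are illustrative assumptions.</p>
<preformat>
import torch
from torchvision import datasets, transforms

# Augmentations of the kind described for the banana leaf spot dataset:
# blur, horizontal flip, random crop, contrast adjustment, and rotation.
train_tf = transforms.Compose([
    transforms.RandomResizedCrop(224),
    transforms.RandomHorizontalFlip(),
    transforms.GaussianBlur(kernel_size=3),
    transforms.ColorJitter(contrast=0.2),
    transforms.RandomRotation(15),
    transforms.ToTensor(),
])

# Folder-per-class layout, e.g., data/plantvillage/train/Apple___scab/xxx.jpg
# (the path is a hypothetical example).
train_set = datasets.ImageFolder("data/plantvillage/train", transform=train_tf)
train_loader = torch.utils.data.DataLoader(train_set, batch_size=32, shuffle=True)
</preformat>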
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>Plant leaf disease image database in China</title>
<list list-type="order">
<list-item>
<p>PDD271. The plant disease dataset PDD271 collected by <xref ref-type="bibr" rid="B68">Liu et&#xa0;al. (2021)</xref> includes 220,592 plant leaf images covering 271 plant disease categories. Each plant disease category contains at least 500 images from more than 200 plants. Each image was labeled by experts at the time of collection, and after collection, experts not involved in the labeling work checked the labels to ensure their correctness.</p>
</list-item>
<list-item>
<p>Agricultural disease and pest research database (IDADP). This dataset is a comprehensive database jointly constructed by the Hefei Institute of Intelligent Machinery, the Institute of Subtropical Agroecology, and the Institute of Remote Sensing and Digital Earth of the Chinese Academy of Sciences. It covers various plant types, such as field crops, fruits, and vegetables, and various disease types, such as fungal, bacterial, and viral diseases. Each disease or pest category includes hundreds to thousands of images. Most of the images were taken with SLR cameras at resolutions of no less than 20 megapixels (6000&#xd7;4000, 5472&#xd7;3648), and a few were taken with mobile phones at a resolution of 4128&#xd7;2322 pixels. Moreover, most images were taken under natural conditions and can be applied to the identification and detection of plant diseases under complex natural conditions.</p>
</list-item>
<list-item>
<p>Plant disease symptom database (PDDB). This dataset is a free database covering 21 plant species and 171 disease types, with nearly 50,000 collected images. Eighty-five percent of the images were taken under realistic conditions, and the rest under controlled conditions. The images were captured by digital cameras and mobile devices with resolutions ranging from 1 to 24 megapixels (<xref ref-type="bibr" rid="B10">Barbedo et&#xa0;al., 2018</xref>) and were annotated by experts.</p>
</list-item>
<list-item>
<p>Rice leaf disease dataset. The images were taken under direct sunlight against a white background and cover rice bacterial blight, brown spot, and smut, with 40 images of each disease, totaling 120 images.</p>
</list-item>
<list-item>
<p>Cotton plant disease. The dataset contains 1522 high-quality images of cotton pests and diseases.</p>
</list-item>
</list>
</sec>
</sec>
<sec id="s3">
<label>3</label>
<title>Advances in plant leaf disease detection</title>
<p>Plant leaf disease detection is the process of recognizing infected areas and locating them precisely within a complicated natural environment (<xref ref-type="bibr" rid="B1">Abade et&#xa0;al., 2021</xref>). This technology is the basis for accurate classification and identification of plant leaf diseases and for evaluating the degree of damage. It is also the key link for accurately locating disease areas and guiding plant protection equipment to implement precise spraying.</p>
<p>Early plant disease target detection algorithms usually use a sliding-window strategy to generate candidate regions, extract features from these regions, and classify them with classifiers to finally determine the target regions (<xref ref-type="bibr" rid="B11">Ben et&#xa0;al., 2024</xref>). Viola-Jones detection, histogram of oriented gradients (HOG) detection, and deformable part models are examples of common techniques. The sliding-window method traverses the image at different scales and aspect ratios. Although this method ensures that no potential disease regions are missed, it produces a large number of redundant candidate windows, significantly increasing the computational effort; traversing the whole image also takes a long time, making detection inefficient. In addition, feature extraction for candidate regions depends on manual design, and the extracted features mainly capture low-level information such as the color and shape of diseases, which makes the robustness of disease detection poor. Adaptive boosting (AdaBoost) and support vector machines (SVMs) are usually used as classifiers, but these methods suffer from slow recognition speed and low accuracy.</p>
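<p>As a concrete illustration of the classical pipeline described above, the following minimal sketch combines a sliding window, hand-crafted histogram-of-oriented-gradients features, and a pre-trained support vector machine; the window size, stride, and score threshold are illustrative assumptions rather than values taken from any cited study, and a grayscale input image is assumed.</p>
<preformat>
from skimage.feature import hog          # hand-crafted HOG features
from sklearn.svm import LinearSVC        # linear SVM classifier

def sliding_window_detect(image, clf, win=64, stride=32, thresh=0.0):
    """Return (row, col, score) for windows the SVM scores above thresh.

    image: 2-D grayscale array; clf: a LinearSVC already trained on HOG
    features of diseased vs. healthy patches (training is not shown here).
    """
    detections = []
    h, w = image.shape[:2]
    for r in range(0, h - win + 1, stride):
        for c in range(0, w - win + 1, stride):
            patch = image[r:r + win, c:c + win]
            feat = hog(patch, pixels_per_cell=(8, 8), cells_per_block=(2, 2))
            score = clf.decision_function(feat.reshape(1, -1))[0]
            if score > thresh:
                detections.append((r, c, score))
    return detections
</preformat>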
<sec id="s3_1">
<label>3.1</label>
<title>Plant leaf disease detection based on target detection framework</title>
<p>Emerging detection algorithms such as the deep learning-driven region-based convolutional neural networks (R-CNN) series, you only look once (YOLO), single shot multi-box detector (SSD), and CenterNet show significant performance advantages over earlier plant target detection algorithms. In the object detection framework of deep learning, these algorithms can be divided into two main categories: two-stage detectors and one-stage detectors (<xref ref-type="bibr" rid="B14">Bsher et&#xa0;al., 2024</xref>).</p>
<sec id="s3_1_1">
<label>3.1.1</label>
<title>Plant leaf disease detection based on two-stage detectors</title>
<p>Two-stage detectors first generate a sparse set of candidate boxes using a candidate box generator, extract features from each candidate box, and then predict the category of each candidate box region using a region classifier, as shown in <xref ref-type="fig" rid="f1">
<bold>Figure&#xa0;1</bold>
</xref>.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Two-stage detection algorithm based on apple powdery mildew leaf diseases. The flowchart of the detection model using Faster R-CNN. The result picture contains the predicted disease category, the confidence, and the location of the disease marked by an orange box. The region proposal network (RPN) generates multiple anchor boxes with preset proportions and scales on the feature map, and then performs classification and coordinate regression to screen out candidate boxes that may contain the target.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1637241-g001.tif">
<alt-text content-type="machine-generated">Flowchart illustrates a process for classifying plant diseases using images. An input sample image shows a leaf with powdery mildew, confirmed with high confidence. The process involves Res2Net, RPN, Fast R-CNN, and components like feature map, RoLAlign, classification, proposals, and bounding box regression, leading to the test result displaying &#x201c;Powdery_mildew 1.00&#x201d;.</alt-text>
</graphic>
</fig>
<p>
<xref ref-type="bibr" rid="B56">Li M. et&#xa0;al. (2023)</xref> designed an integration model integrating one-stage and two-stage target detection networks. The YOLO and Faster-RCNN are integrated to maximize the size of the target frame using a clustering approach, hence improving the target&#x2019;s detection effectiveness. Transfer learning was used to improve the training speed of the model, and finally the average accuracy of 37 pests and 8 diseases was 85.2%, which was significantly higher than other control models. <xref ref-type="bibr" rid="B34">Gong and Zhang (2023)</xref> suggested an enhanced Faster R-CNN approach for detecting apple leaf disease. Res2Net and feature pyramid network are described as feature extraction networks. A region of interest align (RoIAlign) replaced region of interest pooling (RoIPool) to increase candidate region accuracy. The accuracy rate is 63.1%. <xref ref-type="bibr" rid="B119">Wu and Jiang (2023)</xref> created a pine wilt disease (PWD) detection and extraction algorithm based on Mask R-CNN. Firstly, the advanced ConvNeXt network is used to improve image feature extraction. In order to improve data sharing under low batch-size training, the original multi-scale structure is converted to PA-FPN and normalized. Lastly, a branch is introduced to the Mask module to enhance the fusion-based object extraction capacity. The improved method achieves 91.9% on the PWD dataset. <xref ref-type="bibr" rid="B139">Zhao S. et&#xa0;al. (2022)</xref> reported a new Faster R-CNN architecture, composed of multi-scale fusion ResNet, FPN, and convolutional block attention module (CBAM) blocks, which can effectively extract rich strawberry disease features. Compared with Mask R-CNN and YOLO-v3, this model has higher accuracy and faster detection operation requirements, and its accuracy can reach 92.18%. <xref ref-type="bibr" rid="B114">Wang M. et&#xa0;al. (2023)</xref> exhibited an on-site sweet potato leaf detection method based on a modified Faster R-CNN framework and visual attention mechanism. The accuracy of the investigated method reaches 95.7%, 2.9% higher than the original Faster R-CNN and 7% higher than YOLOv5. The constructed method achieves good performance in detecting dense leaves or occluding leaves. <xref ref-type="bibr" rid="B57">Li et&#xa0;al. (2022)</xref> used a convolutional neural network for multi-scale feature fusion in their potato leaf disease detection approach. The approach offered a workable solution for the diagnosis of maize leaf disease because its average accuracy is higher than that of YOLOv5, Faster R-CNN, and CenterNet.</p>
<p>Convolutional neural networks (CNNs) based on region proposals have achieved some results in the field of plant leaf disease detection, displaying innovation and application potential, although numerous hurdles remain. From a data standpoint, several studies use synthetic data to improve model performance. Although this method increases accuracy, there are differences between synthesized and real-world data, which may affect the model&#x2019;s capacity to generalize in complex real-world scenarios. In terms of model complexity, some models include numerous sophisticated network structures. Although this improves feature and object extraction capabilities, it also results in higher processing costs and longer training and inference times, which presents issues for deployment on the limited resources available in agricultural settings. When it comes to model comparison, different studies employ different datasets and evaluation metrics, making it impossible to compare the benefits and drawbacks of models objectively. Some research concentrates on particular plant diseases, but the reported indicators lack a common reference, which is not conducive to technical comparison. Additionally, the interpretability of these models is often poor. Deep learning, as a black box, makes it difficult for farmers to understand the decision-making basis, reducing confidence and acceptance in practical deployments.</p>
<p>Representative two-stage detectors include R-CNN, Fast R-CNN, and Faster R-CNN. <xref ref-type="bibr" rid="B37">Haruna et&#xa0;al. (2023)</xref> developed a data augmentation pipeline that used a style-generative adversarial network with adaptive discriminator augmentation and Laplacian filter variance to improve Faster R-CNN performance in detecting major rice leaf diseases. Compared with the standard data, this method achieves an average accuracy of 93% in rice leaf disease detection, a significant improvement for the Faster R-CNN model. The structure of Faster R-CNN is shown in <xref ref-type="fig" rid="f2">
<bold>Figure&#xa0;2</bold>
</xref>.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Two-stage detection algorithm based on apple rust leaf diseases. It achieves high-precision object detection through a two-stage network and a region proposal network. Compared with one-stage networks, Faster R-CNN handles multi-scale and small targets more accurately.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1637241-g002.tif">
<alt-text content-type="machine-generated">Diagram illustrating a leaf disease detection process using convolutional neural networks. It includes VGG16 feature extraction with convolution and pooling layers. Sliding windows lead to region proposal networks generating region proposals and scores. An input sample shows a leaf with rust, scored as zero point ninety-two. Detection modules show classification scores and border regression. The ROI undergoes pooling and connection processes to determine the output result.</alt-text>
</graphic>
</fig>
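<p>To illustrate how a two-stage detector of the kind reviewed above is typically adapted to a leaf disease dataset, the following minimal sketch fine-tunes the torchvision implementation of Faster R-CNN with a ResNet-50 FPN backbone; the number of classes, image sizes, and example boxes are illustrative assumptions and do not reproduce any of the cited models.</p>
<preformat>
import torch
import torchvision
from torchvision.models.detection.faster_rcnn import FastRCNNPredictor

def build_leaf_disease_detector(num_classes):
    # Faster R-CNN with a ResNet-50 FPN backbone pre-trained on COCO
    # (transfer learning, as in several of the studies discussed above).
    model = torchvision.models.detection.fasterrcnn_resnet50_fpn(weights="DEFAULT")
    # Replace the box classification head so it predicts the disease classes;
    # torchvision expects num_classes to include the background class.
    in_features = model.roi_heads.box_predictor.cls_score.in_features
    model.roi_heads.box_predictor = FastRCNNPredictor(in_features, num_classes)
    return model

model = build_leaf_disease_detector(num_classes=1 + 4)  # background + 4 diseases
model.train()
# One training step: images plus target dicts with "boxes" (x1, y1, x2, y2)
# and "labels"; the model returns the RPN and detection-head losses.
images = [torch.rand(3, 512, 512)]
targets = [{"boxes": torch.tensor([[50.0, 60.0, 200.0, 220.0]]),
            "labels": torch.tensor([1])}]
losses = model(images, targets)
sum(losses.values()).backward()
</preformat>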
</sec>
<sec id="s3_1_2">
<label>3.1.2</label>
<title>Plant leaf disease detection based on one-stage detector</title>
<p>One-stage detectors make category predictions directly for objects at each position on the feature map, without the region proposal step used in two-stage detectors (<xref ref-type="bibr" rid="B87">&#xd6;nler and K&#xf6;yc&#xfc;, 2024</xref>), as shown in <xref ref-type="fig" rid="f3">
<bold>Figure&#xa0;3</bold>
</xref>.</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>One-stage detection algorithm diagram for wheat powdery mildew. Wheat powdery mildew is detected using YOLOv8. There are 896 photos in this dataset. Its average accuracy, recall, and F1 score are 0.79, 0.74, 0.770, 0.76, and 0.35, respectively.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1637241-g003.tif">
<alt-text content-type="machine-generated">Flowchart illustrating the process of detecting wheat powdery mildew. Top left shows a real wheat field image. Top right displays original, flipped, and contrast-enhanced leaf images for augmentation. The process involves data annotation, resizing images, creating train, validation, and test datasets in a 3:1:1 ratio. YOLOv8 is used for model detection, and a selected model is determined. Bottom left depicts a leaf with mildew annotation. Bottom right shows mildew detection results on a leaf.</alt-text>
</graphic>
</fig>
<p>Representative one-stage detectors include YOLO, SSD, and their variants. YOLO employs a single neural network to simultaneously predict the bounding boxes and class probabilities of multiple objects in an image. Unlike earlier approaches, YOLO is a real-time detection algorithm that divides the input image into grid cells and predicts bounding boxes for each cell. Its real-time performance sacrifices some fine-grained localization accuracy, but it remains competitive in object detection, making it suitable for a wide range of applications. YOLOv12, one of the newest versions of the architecture, has shown great promise in situations requiring both high detection accuracy and real-time inference. The YOLOv12 pipeline is presented in <xref ref-type="fig" rid="f4">
<bold>Figure&#xa0;4</bold>
</xref>.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Apple leaf disease detection for YOLOv12. k is the height and width of the filter in the convolution operation, s is the step size that slides on the input feature map during convolution or upsampling, oc is the channel dimension of the output feature map of the convolution/module, c is the feature fusion operation performed on the c-th dimension, and sf is the scaling factor used in the upsampling module.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1637241-g004.tif">
<alt-text content-type="machine-generated">Flowchart showing a neural network model for leaf disease detection. The &#x201c;Backbone&#x201d; processes an input leaf image through convolutional layers, with features like C3k2 and A2C2f modules. The &#x201c;Head&#x201d; predicts the health status with a probability of 0.92, indicating the leaf is healthy.</alt-text>
</graphic>
</fig>
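<p>For comparison with the two-stage sketch above, the following minimal example, assuming the open-source Ultralytics package and YOLOv8 weights, shows how a one-stage YOLO detector is typically fine-tuned and applied to leaf images; the dataset file, weight file, and hyperparameters are illustrative assumptions, not the settings used in the studies cited below.</p>
<preformat>
from ultralytics import YOLO

# Fine-tune pretrained weights on a leaf-disease dataset described by a
# YOLO-format data file (hypothetical path and settings).
model = YOLO("yolov8n.pt")
model.train(data="leaf_disease.yaml", epochs=100, imgsz=640)

# A single forward pass returns boxes, class ids, and confidences per image.
results = model("diseased_leaf.jpg")
for r in results:
    for box in r.boxes:
        print(int(box.cls), float(box.conf), box.xyxy.tolist())
</preformat>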
<p>
<xref ref-type="bibr" rid="B27">Devisurya et&#xa0;al. (2022)</xref> used YOLOv3 Tiny to detect turmeric illnesses (leaf spot and root rot disease). This approach outperforms existing YOLO variations, including the Faster R-CNN based on the visual geometry group (VGG16). <xref ref-type="bibr" rid="B8">Arun et&#xa0;al. (2024)</xref> employed T-YOLO v4 to detect rice leaf disease. This approach builds on YOLOv4 by including a YOLO detection layer, spatial pyramid pooling, a convolutional block attention module, an hourglass feature extraction module, and a Ghost module. It outperformed the YOLOv7 model in terms of recognition accuracy, scoring 86.36%. <xref ref-type="bibr" rid="B25">Daniela et&#xa0;al. (2024)</xref> used YOLOv7, YOLOv8, and YOLO-NAS to annotate the entire leaf disease area and achieved an accuracy of 97.9%. However, in the task of whole pod disease detection, YOLOv7 and YOLOv8 performed better, with accuracy rates exceeding 95%. <xref ref-type="bibr" rid="B62">Liu et&#xa0;al. (2016)</xref> designed a multi-scale prior box for YOLO to address defects with low accuracy and inaccurate localization in small object detection. <xref ref-type="bibr" rid="B112">Tian et al. (2022)</xref> proposed an improved SSD method for detecting apple leaf diseases. This method achieved an accuracy of 83.19% and a detection speed of 27.53 FPS on the apple leaf disease test set.</p>
<p>
<xref ref-type="bibr" rid="B35">Guo et&#xa0;al. (2022)</xref> offered a cotton detection method based on improved SSD. This method introduces the lightweight network MobileNetV2 to improve the backbone feature extraction network and then integrates the SE attention mechanism, the ECA attention mechanism, and the CBAM attention mechanism. The improved SSD_MobileNetV2+ efficient channel attention (ECA) model has higher accuracy than the SSD_VGG network model in complex cases, reaching 84.8%. <xref ref-type="bibr" rid="B127">Yin et&#xa0;al. (2024)</xref> applied a disease point identification method based on a SSD network, which improved the accuracy of disease point identification. The learned parameter information is transferred to the backbone feature extraction network of the SSD model, which shortens the blob detection time to 0.14 s. When the intersection over union (IoU) threshold is 0.5, the mean average precision at IoU=0.5 (mAP@0.5), the average accuracy of the algorithm reaches 97.1%, which is about 6.35% higher than that of the original algorithm. Compared to existing algorithms such as YOLOv5 and Faster R-CNN, mAP0.5 improves by 16.84% and 8.61%, respectively. Although improved and optimized continuously, the one-stage detection algorithm has been improved in both accuracy and speed. mAP@0.5 is the primary metric for determining the model&#x2019;s localization ability in object detection tasks. It assesses the model&#x2019;s approximation detection performance on the target by applying a 50% IoU threshold. It is appropriate for screening models that meet fundamental positioning requirements (e.g., existence judgment or counting tasks), and prioritizing models with high indicators (e.g., the YOLO series) allows for a balance of efficiency and accuracy. However, it is unaffected by small targets, thick sceneries, or fine border detection and is easily influenced by simple sample distributions.</p>
<p>From the standpoint of algorithm universality, several researchers optimize algorithms for specific plant diseases. Although these studies report good results, the models&#x2019; universality is limited. When confronted with different plant species, diseases, and the complicated and diverse conditions of actual agricultural production, the performance of these models may suffer, and a large amount of data collection and model training work must be redone.</p>
<p>In terms of precision and speed, while YOLO achieves real-time performance by forgoing some fine-grained localization, this balance is not ideal in practical applications. As the model becomes more complicated, real-time detection performance may suffer. How to improve detection speed and strike a better balance while maintaining high accuracy is an important subject for future studies.</p>
<p>Although SSD addresses YOLO&#x2019;s inaccurate detection and localization of small objects, it has its own limitations. In more complex and varied real-world disease scenarios, its robustness still requires further verification. There is also a lack of unified standards and comprehensive evaluations for comparing different improvement methods, making it difficult to determine which improvement strategy is more universal and superior.</p>
<p>In addition, current research mostly focuses on improving and optimizing the algorithms themselves, with relatively little attention paid to data quality and annotation. High-quality and diverse data are the foundation for training high-performance models, but obtaining large-scale, high-quality, and accurately labeled plant disease data in actual agricultural production is not easy. Noise and annotation errors in the data may negatively affect model training and performance, and developing effective data cleaning and annotation optimization methods remains a challenge.</p>
</sec>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Plant leaf disease detection based on anchor-free detectors</title>
<p>
<xref ref-type="bibr" rid="B141">Zhou et&#xa0;al. (2019)</xref> introduced CenterNet, an anchor-free detection algorithm that is based on CornerNet and replaces the original detection of two key points (i.e., the upper left and lower right corners of the picture) with the estimation of the image&#x2019;s center point. Because this technique avoids the need to generate an anchor box and estimate loss using a thermal map, it saves time and improves detection performance significantly. CenterNet-based illness detection is currently understudied. However, it has been shown to be relevant to target identification in natural conditions.</p>
<p>
<xref ref-type="bibr" rid="B78">Marriam et&#xa0;al. (2023)</xref> provided the CoffeeNet model to detect various infections in coffee plant leaves. The model was compared against other deep learning models, including GoogLeNet, AlexNet, and EfficientNet-B0. The results revealed that CoffeeNet outperformed in terms of recognition accuracy and efficiency, with an accuracy rate of 98.54%, as well as processing time and disease-specific key point localization. <xref ref-type="bibr" rid="B71">Liu W. et&#xa0;al. (2023)</xref> designed an improved YOLOX tomato leaf disease identification method. They replace the YOLOX backbone network with MobileNetV3 for lightweight model feature extraction and add a CBAM module. According to simulation experiments and field tests, YOLOX&#x2019;s accuracy rate increased by 1.27%, up to 98.56%. <xref ref-type="bibr" rid="B4">Albattan et&#xa0;al. (2021)</xref> constructed a CornerNet model based on DenseNet-77 to detect 10 types of tomato leaf diseases. By modifying CornerNet, feature extraction, and classification, the model can accurately locate disease regions. Compared with other object detection technologies, this model still shows high reliability under conditions such as noise, illumination changes, color changes, and size changes. The average accuracy value reaches 98.4%, which is higher than the average accuracy value of the comparison method. 12.42% higher.</p>
<p>
<xref ref-type="bibr" rid="B125">Yi et&#xa0;al. (2024)</xref> created a model for identifying and locating citrus Huanglong disease. The accuracy rate of the upgraded model was 7.9% higher than that of the RT-DETR-r18 model, and it demonstrated notable progress in a number of important metrics. The highest accuracy of the model is 92.7%. The method ensures accurate location and identification of citrus Huanglong disease in complex and diverse environments. The detection algorithm without an anchor frame is superior to the detection algorithm based on an anchor frame in performance, and it will be the main research direction in disease area detection in the future.</p>
<p>The anchor-free detection algorithm has become an important technology in the field of plant leaf disease detection because of its simplified pipeline, improved efficiency, and strong robustness. Although current research builds on frameworks such as CenterNet and CornerNet, and high-precision detection has been achieved with models such as CoffeeNet and HHS-RT-DETR, the resolution of small targets remains limited and deployment on edge devices is still difficult. Future research should focus on two directions: first, developing lightweight anchor-free models suitable for field edge devices using neural architecture search and model compression techniques, and second, incorporating mechanisms such as dynamic feature pyramids and high-resolution heatmaps to improve sensitivity to small lesions. Plant disease detection systems will advance toward real-time precision and universality as the anchor-free paradigm, transformer architectures, and multimodal perception technologies are fully integrated, providing more effective technical support for smart agriculture.</p>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Analysis and prospect of plant leaf disease detection</title>
<p>
<xref ref-type="table" rid="T2">
<bold>Tables&#xa0;2</bold>
</xref>&#x2013;<xref ref-type="table" rid="T4">
<bold>4</bold>
</xref> present an analysis of the state of research on intelligent plant disease detection technology. Current studies mainly emphasize the identification of diseases in soybeans, corn, potatoes, and other important commercial crops. Both one-stage and two-stage detection models demonstrate good detection performance. However, there are notable variations in the criteria used to define lesion areas, and pathological features without distinct edges are frequently left out of the detection range. For example, some studies independently identify large-area lesions on single leaves, while others use an overall identification strategy for densely distributed small lesions. It should be noted that there are still technical bottlenecks in applying existing algorithms in actual farmland environments, especially for high-density small target detection, and the adaptability of current models under dynamic environmental settings such as complicated lighting, background interference, and target occlusion needs to be enhanced.</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Plant leaf disease detection based on one-stage detector.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Authors, Years</th>
<th valign="top" align="left">Class</th>
<th valign="top" align="left">Total</th>
<th valign="top" align="left">Collect ways</th>
<th valign="top" align="left">Methods</th>
<th valign="top" align="left">Performance</th>
<th valign="top" align="left">DOI</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B29">Di and Li (2022)</xref>
</td>
<td valign="top" align="left">Apple</td>
<td valign="top" align="left">1404</td>
<td valign="top" align="left">Plant Village</td>
<td valign="top" align="left">Tiny-YOLO</td>
<td valign="top" align="left">Precision=0.938<break/>Recall=0.99<break/>Accuracy=0.99<break/>IoU=0.805<break/>mAP=0.9981</td>
<td valign="top" align="left">10.1371/jou- rnal.pone.0262629</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B105">Soeb et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Tea</td>
<td valign="top" align="left">4000</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">Improved YOLOv 7</td>
<td valign="top" align="left">Precision=0.967<break/>Recall=0.964<break/>F1-Score=0.965 Accuracy=0.973<break/>Train Time =19430s<break/>mAP=0.982</td>
<td valign="top" align="left">10.1038/S41598-0 23-33270-4</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B76">Luo et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Citrus</td>
<td valign="top" align="left">3202</td>
<td valign="top" align="left">Online</td>
<td valign="top" align="left">Light-SA YOLOV 8</td>
<td valign="top" align="left">Precision=0.926<break/>Recall=0.894<break/>F1-Score=0.91<break/>Model size=4.5MB</td>
<td valign="top" align="left">10.1109/ACC ESS.2023.3340148</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B61">Lin et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Tea</td>
<td valign="top" align="left">1000</td>
<td valign="top" align="left">Drone</td>
<td valign="top" align="left">TSBA-YOLO</td>
<td valign="top" align="left">Precision=0.8783<break/>Recall=0.8527</td>
<td valign="top" align="left">10.3390/f 14030619</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B133">Zhang D. et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Wheat</td>
<td valign="top" align="left">20000</td>
<td valign="top" align="left">Microscope</td>
<td valign="top" align="left">GSD-YOLO</td>
<td valign="top" align="left">Precision=0.938 Recall=0.954</td>
<td valign="top" align="left">10.3390/agriculture14122278</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B107">Sun et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Apple</td>
<td valign="top" align="left">4863</td>
<td valign="top" align="left">Online</td>
<td valign="top" align="left">YOLOv 5-Res</td>
<td valign="top" align="left">Precision=0.814<break/>Recall=0.769<break/>Model size=10.8MB<break/>mAP=0.869</td>
<td valign="top" align="left">10.3390/agronomy14061331</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B129">Yuan et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Pine</td>
<td valign="top" align="left">11200</td>
<td valign="top" align="left">Drone</td>
<td valign="top" align="left">Light-ViTeYOLO</td>
<td valign="top" align="left">Recall=0.957</td>
<td valign="top" align="left">10.3390/f15061050</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B55">Li Y. et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Rubber</td>
<td valign="top" align="left">6200</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">PM-YOLO</td>
<td valign="top" align="left">Precision=0.848 Recall=0.856 F1-Score=0.852<break/>mAP=0.869</td>
<td valign="top" align="left">10.3390/insects15120937</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B135">Zhang S. et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Jute</td>
<td valign="top" align="left">3252</td>
<td valign="top" align="left">Baidu Google</td>
<td valign="top" align="left">JutePest-YOLO</td>
<td valign="top" align="left">Precision=0.9872, Recall=0.949 F1-Score=0.9677</td>
<td valign="top" align="left">10.1109/ACCESS.2024.3403491</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B124">Ye et&#xa0;al. (2024b)</xref>
</td>
<td valign="top" align="left">Tea</td>
<td valign="top" align="left">3743</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">Improved YOLOv 8</td>
<td valign="top" align="left">Precision=0.8747 Recall=0.8917 F1-Score=0.8831<break/>mAP=0.9526</td>
<td valign="top" align="left">10.3390/plants13101377</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B16">Chen Z. et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Grape</td>
<td valign="top" align="left">10000</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">YOLOv8-ACCW</td>
<td valign="top" align="left">F1-Score=0.924<break/>mAP=0.928</td>
<td valign="top" align="left">10.1109/ACCESS.2024.3453379</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B82">Meng et&#xa0;al. (2025)</xref>
</td>
<td valign="top" align="left">Maize</td>
<td valign="top" align="left">14700</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">YOLO-MSM</td>
<td valign="top" align="left">Precision=0.9011 Recall=0.8264 TrainTime= 3312s</td>
<td valign="top" align="left">10.1038/s41598-025-88399-1</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B143">Zhu et&#xa0;al. (2025b)</xref>
</td>
<td valign="top" align="left">Grape</td>
<td valign="top" align="left">17642</td>
<td valign="top" align="left">Plant Village</td>
<td valign="top" align="left">YOLOv8</td>
<td valign="top" align="left">Precision=0.9264<break/>Recall=0.9328</td>
<td valign="top" align="left">10.1371/journal.pone.0321788</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B66">Liu Z. et&#xa0;al. (2025)</xref>
</td>
<td valign="top" align="left">Tomato</td>
<td valign="top" align="left">2646</td>
<td valign="top" align="left">Plant Village</td>
<td valign="top" align="left">YOLO-BSMamba</td>
<td valign="top" align="left">Precision=0.858 Recall=0.784 F1-Score=0.819</td>
<td valign="top" align="left">10.3390/agronomy15040870</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="T3" position="float">
<label>Table&#xa0;3</label>
<caption>
<p>Plant leaf disease detection based on two-stage detector.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Authors, Years</th>
<th valign="top" align="left">Class</th>
<th valign="top" align="left">Total</th>
<th valign="top" align="left">Collect ways</th>
<th valign="top" align="left">Methods</th>
<th valign="top" align="left">Performer</th>
<th valign="top" align="left">DOI</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B67">Liu H. et&#xa0;al. (2022)</xref>
</td>
<td valign="top" align="left">Maize</td>
<td valign="top" align="left">7222</td>
<td valign="top" align="left">Field shot</td>
<td valign="top" align="left">LS-RCNN and CENet</td>
<td valign="top" align="left">Accuracy=0.99<break/>Precision=0.99<break/>recall=0.99<break/>F1-score=0.30</td>
<td valign="top" align="left">10.1038/S41598-022-23484-3.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B85">Nawaz et&#xa0;al. (2022)</xref>
</td>
<td valign="top" align="left">Tomato</td>
<td valign="top" align="left">54306</td>
<td valign="top" align="left">PlantVillage</td>
<td valign="top" align="left">Faster-RCNN</td>
<td valign="top" align="left">Accuracy=0.98<break/>Precision=0.99<break/>recall=0.99<break/>F1-score=0.99<break/>mAP=0.98<break/>IoU=0.93</td>
<td valign="top" align="left">10.1038/S41598-022-21498-5</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B34">Gong and Zhang (2023)</xref>
</td>
<td valign="top" align="left">Apple</td>
<td valign="top" align="left">4182</td>
<td valign="top" align="left">Mobile phone</td>
<td valign="top" align="left">Faster R-CNN</td>
<td valign="top" align="left">Accuracy=0.63<break/>Precision=0.63<break/>recall=0.71<break/>IoU=0.75</td>
<td valign="top" align="left">10.3390/agriculture 13020240.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B108">Sun et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Tomato</td>
<td valign="top" align="left">5090</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">Veg DenseCap</td>
<td valign="top" align="left">Accuracy=0.88<break/>Precision=0.99<break/>mAP=0.93<break/>IoU=0.70</td>
<td valign="top" align="left">10.3390/agronomy13071 700.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B140">Zhou et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Rice</td>
<td valign="top" align="left">10409</td>
<td valign="top" align="left">Field</td>
<td valign="top" align="left">R-CNN and YOLOv</td>
<td valign="top" align="left">Accuracy=0.98<break/>Precision=0.91<break/>recall=0.95<break/>F1-score=0.99<break/>mAP=0.95<break/>IoU=0.95</td>
<td valign="top" align="left">10.3390/AGRICULTURE14020290.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B132">Zhang Z. et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Tea</td>
<td valign="top" align="left">1692</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">Faster RCNN</td>
<td valign="top" align="left">Accuracy=0.87<break/>Precision=0.87<break/>recall=0.90</td>
<td valign="top" align="left">10.1016/J.SCIENTA.2024.112949.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B64">Liu J. et&#xa0;al. (2025)</xref>
</td>
<td valign="top" align="left">Rice</td>
<td valign="top" align="left">3754</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">Faster R-CNN</td>
<td valign="top" align="left">Accuracy=0.83<break/>mAP=0.83<break/>IoU=0.50</td>
<td valign="top" align="left">10.5755/j01.itc.54.1.39520.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B40">He J. et al. (2025)</xref>
</td>
<td valign="top" align="left">Maize</td>
<td valign="top" align="left">7928</td>
<td valign="top" align="left">Plant Village</td>
<td valign="top" align="left">YOLOv11-RCDWD</td>
<td valign="top" align="left">Accuracy=0.92<break/>Precision=0.92<break/>recall=0.85<break/>F1-score=0.88 mAP=0.66</td>
<td valign="top" align="left">10.3390/app15084535.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B122">Yadav and Tewari (2025)</xref>
</td>
<td valign="top" align="left">Tomato</td>
<td valign="top" align="left">2786</td>
<td valign="top" align="left">Fieldplant</td>
<td valign="top" align="left">CONF-RCNN</td>
<td valign="top" align="left">Accuracy=0.90</td>
<td valign="top" align="left">10.1007/s41348-024-01057-y.</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="T4" position="float">
<label>Table&#xa0;4</label>
<caption>
<p>Plant leaf disease detection based on anchor-free.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Authors, Years</th>
<th valign="top" align="left">Class</th>
<th valign="top" align="left">Total</th>
<th valign="top" align="left">Collect ways</th>
<th valign="top" align="left">Methods</th>
<th valign="top" align="left">Performance</th>
<th valign="top" align="left">DOI</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B9">Bao et&#xa0;al. (2022)</xref>
</td>
<td valign="top" align="left">Tea</td>
<td valign="top" align="left">700</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">AX-RetinaNet</td>
<td valign="top" align="left">Precision=0.9675 Recall=0.94 F1-Score=0.954<break/>mAP=0.9383</td>
<td valign="top" align="left">10.1038/s41598-022- 06181-z</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B59">Li X. et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Multiple plant</td>
<td valign="top" align="left">58486</td>
<td valign="top" align="left">PlantVillage</td>
<td valign="top" align="left">VLDNet</td>
<td valign="top" align="left">Precision=0.983 Recall=0.9832 F1-Score=0.9831 Accuracy=0.9832</td>
<td valign="top" align="left">10.3390/agriculture13081482</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B45">Hu X. et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Strawberry</td>
<td valign="top" align="left">3411</td>
<td valign="top" align="left">Field</td>
<td valign="top" align="left">CALP-CNN</td>
<td valign="top" align="left">Precision=0.9255 Recall=0.918 F1-Score=0.9196 Accuracy=0.9256</td>
<td valign="top" align="left">10.3389/fpls.2023.1091600</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B18">Chen et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Oilseed rape</td>
<td valign="top" align="left">1764</td>
<td valign="top" align="left">Controlled</td>
<td valign="top" align="left">AMDFNet</td>
<td valign="top" align="left">Precision=0.8461 Recall=0.8495 Accuracy=0.8678</td>
<td valign="top" align="left">10.3390/plants12142701</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B101">Shao et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Cotton</td>
<td valign="top" align="left">3910</td>
<td valign="top" align="left">Field shot</td>
<td valign="top" align="left">CANnet</td>
<td valign="top" align="left">Precision=0.988 F1-Score=0.986 Accuracy=0.986</td>
<td valign="top" align="left">10.3390/agriculture14091577</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B58">Li S. et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Tea</td>
<td valign="top" align="left">658</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">VCRUNet</td>
<td valign="top" align="left">Precision=0.9227 Recall=0.9237 F1-Score=0.9232 Accuracy=0.9248</td>
<td valign="top" align="left">10.1109/ACCESS.202 4.33 73707</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B70">Liu and Wang (2024)</xref>
</td>
<td valign="top" align="left">Multiple plant</td>
<td valign="top" align="left">11564</td>
<td valign="top" align="left">Controlled</td>
<td valign="top" align="left">MIFV</td>
<td valign="top" align="left">Precision=0.9353 Recall=0.9122<break/>mAP=0.9238</td>
<td valign="top" align="left">10.1186/s12870-024-05346-4</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B123">Ye et&#xa0;al. (2024a)</xref>
</td>
<td valign="top" align="left">Tea</td>
<td valign="top" align="left">4560</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">YOLOv8-RMDA</td>
<td valign="top" align="left">Precision=0.8484 Recall=0.8821</td>
<td valign="top" align="left">10.3390/s240928 96</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B115">Wang J. Y. et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Tea</td>
<td valign="top" align="left">4001</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">YOLOv8-RCAA</td>
<td valign="top" align="left">Precision=0.9823 Recall=0.8534 F1-Score=0.9133<break/>mAP=0.9814</td>
<td valign="top" align="left">10.3390/agriculture14081240</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B111">Thai et&#xa0;al. (2025)</xref>
</td>
<td valign="top" align="left">Banana</td>
<td valign="top" align="left">40114</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">EF-CenterNet</td>
<td valign="top" align="left">F1-Score=0.5688</td>
<td valign="top" align="left">10.1016/j.compag.2025.109927</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B42">He Y. J. et&#xa0;al. (2025)</xref>
</td>
<td valign="top" align="left">Passion<break/>Fruit</td>
<td valign="top" align="left">6993</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">Transformer</td>
<td valign="top" align="left">Precision=0.93 Recall=0.88 F1-Score=0.9 Accuracy=0.91</td>
<td valign="top" align="left">10.3390/agriculture15070733</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B74">Long et&#xa0;al. (2025)</xref>
</td>
<td valign="top" align="left">Tomato</td>
<td valign="top" align="left">3625</td>
<td valign="top" align="left">Online</td>
<td valign="top" align="left">Graph-CenterNet</td>
<td valign="top" align="left">Precision=0.9901 Recall=0.9276 F1-Score=0.96</td>
<td valign="top" align="left">10.3390/agronomy1503 0667</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Current methods lack the ability to generalize across datasets, despite their strong performance on particular datasets. Building a robust universal detection framework will therefore become an important research direction. In terms of detection timeliness, research on early disease identification is in its infancy, mainly limited by the difficulty of data acquisition and the subtler morphological characteristics of early lesions. Because initial symptoms are not obvious, accurately identifying the disease type and locating the lesion area is a major challenge. However, early warning plays a key role in controlling the spread of pathogens, so it is important to develop early detection technology based on weak feature recognition. In addition, existing detection systems still rely on manual auxiliary operations.</p>
</sec>
</sec>
<sec id="s4">
<label>4</label>
<title>Advances in plant leaf disease classification</title>
<p>The technical ability to process and analyze disease images in order to detect and differentiate between various disease categories is known as plant disease recognition technology. This technology serves as a crucial foundation for the prompt prevention and efficient treatment of plant diseases.</p>
<p>In the early stages of plant disease recognition research, feature extraction and screening relied heavily on artificial experience and domain knowledge. The technical performance of such systems hinges on two characteristics: whether the features fully express the essential information of disease targets while maintaining high discrimination among categories, and whether they cooperate effectively with the classification algorithm. Traditional methods mainly use three kinds of visual features (shape, color, and texture) for classification and modeling. Texture features are extracted with tools such as the gray-level co-occurrence matrix (GLCM), fractal dimension, and Gabor filters; fractal dimension in particular is useful for measuring subtle variations in the surface roughness of disease spots.</p>
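<p>The following scikit-image sketch illustrates the kind of hand-crafted GLCM texture descriptors such traditional pipelines feed to a classifier; the input patch here is synthetic and purely illustrative.</p>
<preformat>
# Sketch of hand-crafted GLCM texture features of the kind used by traditional
# pipelines (scikit-image; the patch below is synthetic, for illustration only).
import numpy as np
from skimage.feature import graycomatrix, graycoprops

patch = np.random.randint(0, 256, size=(64, 64), dtype=np.uint8)  # stand-in for a lesion patch
glcm = graycomatrix(patch, distances=[1], angles=[0, np.pi / 2],
                    levels=256, symmetric=True, normed=True)
features = [graycoprops(glcm, prop).mean()
            for prop in ("contrast", "homogeneity", "energy", "correlation")]
print(features)   # a 4-dimensional texture descriptor for a downstream classifier
</preformat>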
<p>Research practice shows that fusing multi-dimensional features and constructing a classifier ensemble is the main technical path to improving accuracy. However, this manual feature engineering places a dual demand on practitioners: it requires both deep expertise in image processing and solid knowledge of plant pathology. Multiple interfering factors, such as differences in plant developmental stage, disease polymorphism, and changes in ambient lighting, make it difficult to dynamically and optimally adjust hand-crafted feature systems. The recognition accuracy of traditional methods tends to decline dramatically in complex field conditions, particularly when diseases display atypical symptoms or multiple interferences occur together. This constraint derives mostly from the difficulty of using hand-crafted feature modeling to adequately capture the complexity and diversity of disease presentations in real-world conditions.</p>
<p>Deep learning learns feature representations directly from raw image pixels through adaptive, general-purpose algorithms, fundamentally breaking through the limitations of traditional manual feature engineering. A number of significant convolutional neural network architectures have been developed as computing paradigms have shifted, including ZFNet in 2013, VGG and GoogLeNet in 2014, ResNet in 2015, and later DenseNet (2017), MobileNet (2017), and EfficientNet (2019). Through techniques such as residual connections, depthwise separable convolution, and compound scaling, these network designs keep developing iteratively, progressively creating a technical ecosystem that can adapt to the demands of many scenarios. In general, deep learning-based techniques for identifying plant diseases follow the fundamental flow depicted in <xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5</bold>
</xref>.</p>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Neural network-based recognition of rice leaf diseases. Image preprocessing begins by gathering image data and includes data unification and augmentation, in which multiple datasets are amalgamated and synthetic data are generated. The deep learning model takes an image as input and classifies it into one of the disease categories based on the image features. The model is deployed through an application programming interface (API), an Android app, and a website.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1637241-g005.tif">
<alt-text content-type="machine-generated">Flowchart illustrating a process for detecting rice diseases using deep learning. It starts with data collection, unifications, and augmentation. Images show rice diseases: tungro, bacterial blast, and brown spot. These images feed into a deep learning CNN model, leading to model deployment. The output connects to an app, API, and website, providing predictions like &#x201c;Crop Name: Rice, Disease: Blast."</alt-text>
</graphic>
</fig>
<sec id="s4_1">
<label>4.1</label>
<title>Plant leaf disease identification based on deep neural networks</title>
<p>Compared with traditional plant leaf disease detection methods, deep learning automates the image processing pipeline by constructing an end-to-end recognition framework, effectively reducing manual steps and significantly improving detection efficiency. The model&#x2019;s independent learning ability improves steadily as its complexity and network depth rise; it can capture not only the global morphological aspects of leaf lesions but also multidimensional feature information, including cell structure and texture gradients. However, as the parameter scale of deep models increases, so do training time costs and the risks of vanishing gradients, overfitting, and other problems. Thus, by enhancing the network topology, adding attention mechanisms, and refining regularization strategies, researchers are actively investigating convolutional neural network architectures for plant pathology detection and are consistently improving models&#x2019; recognition accuracy for multi-category diseases and their generalization ability in complex environments.</p>
<p>
<xref ref-type="bibr" rid="B100">Shah et&#xa0;al. (2023)</xref> introduced a pre-trained model for citrus plant disease recognition and classification utilizing the transfer learning approach and a convolutional neural network (EfficientNetB3, ResNet50, InceptionV2, and InceptionV3). In this experiment, the EfficientNetB3 model has the highest test accuracy, reaching up to 99.58%. <xref ref-type="bibr" rid="B36">Haleem et&#xa0;al. (2022)</xref> designed a robust disease detection method based on convolutional neural networks, which uses CNN&#x2019;s powerful feature extraction ability to detect diseases in fruit and leaf images. The feature extraction pipeline of several of the most advanced pre-trained networks is fine-tuned to achieve the best detection performance. The average accuracy of the optimal model on the test image set is 96.6%. It has 90% accuracy. This indicates how accurate the prediction was. It works well in categorization situations where the cost of false alarms is considerable. To cut down on errors, it must now give top priority to choosing highly accurate models. <xref ref-type="bibr" rid="B13">Bijoy et&#xa0;al. (2024)</xref> demonstrated a dCNN-based model for detecting five types of rice leaf diseases: brown spot, rice blast, bacterial blight, sheath blight, and rice blast. They compared their model with 21 benchmark frameworks and 14 parallel methods and verified the effectiveness of this method through a large number of experimental results. The accuracy rate of this method reached 99.81%.</p>
<p>
<xref ref-type="bibr" rid="B73">Lobna et&#xa0;al. (2024)</xref> used a large-scale dataset of 70,834 images to compare the performance of a tomato leaf disease classification and recognition method based on an optimized Capsule Neural Network (CapsNet) to a traditional convolutional neural network, achieving an accuracy rate of 96.39%. <xref ref-type="bibr" rid="B49">Kanda et&#xa0;al. (2022)</xref> presented a residual neural network approach for identifying tomato illnesses. The study examined variety at four levels: depth size, discrimination learning rate, training and validation data segmentation ratio, and batch size. Five network depths were utilized to determine the network&#x2019;s correctness for experimental analysis. The experimental results reveal that this method outperforms prior competitive methods in tomato leaf disease identification, achieving 99.5% accuracy. <xref ref-type="bibr" rid="B104">Sk and Arnab (2022)</xref> demonstrated a new deep learning model based on initial layers and residual connections, which used deep separable convolution to reduce the number of model parameters. After training and testing on three plant disease datasets, the model achieved an accuracy of 99.39% on the PlantVillage dataset.</p>
<p>
<xref ref-type="bibr" rid="B79">Masood et&#xa0;al. (2023)</xref> improved the Faster-RCNN method for computing deep keypoints and designed a deep learning method called MaizNet to locate and classify various types of maize leaf disease with an average accuracy of 97.89%, indicating the effectiveness of locating and classifying multiple types of maize leaf infection. <xref ref-type="bibr" rid="B3">Albahli and Masood (2022)</xref> suggested an end-to-end learning CNN structure for the efficient attention network based on the EfficientNetv2 model, which is utilized to recognize maize crop illnesses in several classes. Compared with traditional neural networks, this model has better performance, and the accuracy rate of corn crop disease classification reaches 99.89%. The framework provided by <xref ref-type="bibr" rid="B46">Ilyas et&#xa0;al. (2022)</xref> consists of three main components. The detector contained therein extracts region-based anomaly features by using a feature extractor of a deep learning network to accurately and efficiently identify and locate anomalies in plants. The algorithm achieves an average accuracy of 91.7% in anomaly detection tasks.</p>
<p>
<xref ref-type="bibr" rid="B28">Dey et&#xa0;al. (2022)</xref> proposed a deep learning-based rice leaf disease recognition framework as shown in <xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6</bold>
</xref> and achieved the classification accuracy of 91.8%.</p>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>The overall rice leaf disease recognition framework. This framework is based on VGG16, VGG19, InceptionV3, ResNet50, and a 5-layer CNN to train, validate, and test open-source Kaggle plant disease data and 6 disease categories of rice leaves collected on-site in a ratio of 7:1:2.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1637241-g006.tif">
<alt-text content-type="machine-generated">Flowchart illustrating a process for analyzing plant samples. It starts with a field station collecting samples, followed by sample images, then inputting them into a system. The data is split into 70 percent for training, 20 percent for testing, and 10 percent for validation. Deep learning neural networks, including VGG16, VGG19, ResNet50, and InceptionV3, are used for analysis. A bar graph shows test accuracy, with various models. The prediction result shows potassium was accurately identified with 97.24 percent confidence.</alt-text>
</graphic>
</fig>
<p>
<xref ref-type="bibr" rid="B88">Pan et&#xa0;al. (2024a)</xref> developed a TTN-MobileNetV2 neural network model for plant leaf disease detection based on memristor. The experimental results on the rice leaf disease dataset achieved the highest recognition accuracy of 99.16%. <xref ref-type="bibr" rid="B90">Pandian et&#xa0;al. (2022)</xref> performed a deep convolutional neural network (DCNN) model for image-based plant leaf disease recognition and trained it on an enhanced dataset of more than 240,000 images of different healthy and diseased plant leaves and backgrounds with an average classification accuracy of 98.41% on the test dataset.</p>
<p>Although researchers used large-scale datasets, the data&#x2019;s diversity and representativeness remain insufficient. Many datasets may only include plant disease samples from specific regions, seasons, or planting settings, making it difficult to capture the complex and diverse disease circumstances seen in actual agricultural production. Plant disease performance characteristics may vary across climatic conditions, and existing datasets may not fully reflect this diversity, limiting the model&#x2019;s generalizability in practical applications and making it difficult to accurately identify new environments or rare diseases.</p>
<p>Although researchers consistently improve model performance by strengthening network structures and integrating new mechanisms, the models&#x2019; complexity grows in tandem. Complex models not only need more computing resources and longer training times but may also have poor interpretability. In actual agricultural production, specialists care about why a model produces a given diagnostic result, yet deep learning models behave like black boxes and struggle to deliver clear, understandable explanations. This hinders the models&#x2019; promotion and implementation in production.</p>
<p>In addition, there are differences in experimental settings and evaluation criteria between different studies, which makes it difficult to make fair and objective comparisons of the performance of each model. Some studies may only be tested on specific datasets or experimental environments, without fully considering various interference factors in practical applications, such as changes in lighting, occlusion, noise, etc., resulting in a gap between experimental results and actual application effects.</p>
<p>Although deep learning has brought new opportunities and breakthroughs for plant leaf disease detection, in order to achieve its widespread and effective application in agricultural production, further research is needed in improving data quality, enhancing model interpretability, and standardizing experimental evaluation.</p>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>Plant leaf disease identification based on lightweight networks</title>
<p>In recent years, to cope with limited resources and complex natural scenes, researchers have turned to lightweight networks. <xref ref-type="bibr" rid="B113">Vasudevan and Karthick (2024)</xref> proposed an advanced capsule method to detect grape leaf disease by constructing a lightweight capsule network with depthwise separable convolutions. Compared with the existing ResNeXt deep learning model and traditional capsule neural networks, the computational complexity of this method is significantly reduced, and its overall parameters and training time are much lower, while its accuracy reaches 95.041%. <xref ref-type="bibr" rid="B65">Liu Y. et&#xa0;al. (2022)</xref> described a modified lightweight convolutional neural network, SqueezeNext, that includes a multi-scale convolution kernel and a coordinate attention mechanism for accurately extracting lesion information. The model achieves 91.94% recognition accuracy on the 2018 plant disease dataset, 3.02% higher than the original model. The technology is suited for deployment on mobile devices and other embedded devices with modest resources, contributing to the popularization of intelligent agriculture. <xref ref-type="bibr" rid="B24">Dai M. et&#xa0;al. (2023)</xref> constructed an enhanced lightweight model based on the GoogLeNet architecture. Compared with GoogLeNet based on Inception-V1 and Inception-V3, the model&#x2019;s resource requirements are reduced by 52.31% and 86.69%, respectively, and compared with AlexNet, ResNet-50, and MobileNet-V2, its accuracy reaches 97.87%, significantly higher than the other models. Its identification precision and computational performance for pepper leaf diseases are advantageous, which benefits large-scale popularization and application in pepper planting. The recall and F1-score of this method are both about 99%. Recall is suitable for recognition scenarios where missed detections are costly; such tasks should prioritize a high recall rate to reduce the risk of missed detections, while still balancing it against precision and adjusting the classification threshold to task requirements to avoid indicator distortion caused by data distribution or threshold sensitivity. The F1-score, as a combined indicator of precision and recall, directly reflects a model&#x2019;s ability to balance &#x201c;reducing misjudgments&#x201d; and &#x201c;avoiding missed detections&#x201d;: the higher the F1-score, the more balanced the model, so tasks sensitive to both false positives and false negatives favor high F1-score models. A lower F1-score may indicate that the model is biased toward conservatism or aggressiveness, and the threshold or model structure should then be adjusted according to task requirements to improve overall performance.</p>
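<p>The trade-off described above can be made concrete with a small numeric sketch using hypothetical counts: relaxing the decision threshold raises recall at the cost of precision, and the F1-score summarizes the balance.</p>
<preformat>
# Numeric sketch of the precision/recall/F1 trade-off discussed above
# (the counts are hypothetical, purely to illustrate the threshold effect).
def prf(tp, fp, fn):
    precision = tp / (tp + fp)
    recall = tp / (tp + fn)
    f1 = 2 * precision * recall / (precision + recall)
    return precision, recall, f1

# Conservative threshold: few false alarms, more missed lesions.
print(prf(tp=80, fp=5, fn=20))    # high precision, lower recall
# Aggressive threshold: fewer misses, more false alarms.
print(prf(tp=95, fp=30, fn=5))    # high recall, lower precision; compare F1
</preformat>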
<p>
<xref ref-type="bibr" rid="B77">Ma et&#xa0;al. (2023)</xref> explored a model that can recognize apple leaf images with complex backgrounds and disease symptoms. Model_Lite outperforms MobileNet, ShuffleNet, SqueezeNet, and GhostNet in terms of average recognition accuracy while using far fewer parameters and processing resources. The maximum grouping model&#x2019;s average recognition accuracy improves by 0.19% while maintaining the same parameters and computational resources. By reducing the number of convolution kernels, the network model is reasonably simplified. The final model is a model with fewer convolution kernels, and its parameters and computational complexity are 1/344 and 1/35 of the original ResNet18 model, respectively. Although the average accuracy dropped by 0.34% on the experimental dataset, Model_Lite achieved the highest recognition accuracy of 91.21% compared to lightweight networks such as MobileNet, ShuffleNet, SqueezeNet, and GhostNet.</p>
<p>
<xref ref-type="bibr" rid="B39">He et&#xa0;al. (2022)</xref> used multi-spectral images as inputs for the MobileNetV3 and installed them on handheld edge devices to identify navel orange leaf diseases. The process is presented in <xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7</bold>
</xref>.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>The overall orange and pokan leaf disease identification system. This system collected multispectral reflectance and fluorescence images of healthy, asymptomatic, symptomatic, magnesium-deficient, and nitrogen-deficient leaves of two varieties, Ponkan and Navel Orange, respectively. Through deep learning, the lightweight network MobileNetV3, and transfer learning, an overall classification accuracy of 92.1% was achieved.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1637241-g007.tif">
<alt-text content-type="machine-generated">Diagram illustrating a system for detecting leaf conditions using multispectral reflectance and fluorescence imaging. The top left shows orange and pokan samples, with leaf conditions labeled as HLB-asymptomatic, HLB-symptomatic, nutrient-deficient, and magnesium-deficient. Central is a schematic of the imaging system with a CCD camera, LED excitation, and filters. The lower section depicts a prediction process using MobileNetV3 and transfer learning, identifying leaf conditions with confidence levels.</alt-text>
</graphic>
</fig>
<p>Although researchers have achieved good results using lightweight networks for plant leaf disease recognition in complex natural scenes in recent years, some problems remain. From the perspective of balancing model performance, some researchers, in the pursuit of lightweighting, handle the trade-off between accuracy and computational complexity poorly. Although recognition accuracy higher than that of lightweight networks such as MobileNet and ShuffleNet has been achieved, accuracy on the dataset still decreases slightly compared with the original model, indicating that resource constraints affect model accuracy to some extent. Researchers need to further explore how to achieve more extreme lightweighting while ensuring high precision. Most existing research focuses on the detection of specific plant diseases; although these models perform well on their respective diseases, their universality is poor when facing complex and varied natural scenes with a mixture of multiple plant diseases. In practical applications, farmers often plant multiple crops with a wide variety of diseases, so developing lightweight plant disease detection models with broad applicability is an important challenge for the future. In addition, differences in the datasets, evaluation metrics, and experimental environments used in different studies make it difficult to directly compare the performance of various models. Researchers should establish unified standards and norms and adopt more comparable experimental settings in order to evaluate the advantages and disadvantages of different models more accurately.</p>
<p>We used an improved MobileNetV3 to perform accurate pea leaf disease classification. The framework is sketched in <xref ref-type="fig" rid="f8">
<bold>Figure&#xa0;8</bold>
</xref>.</p>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>The overall pea leaf disease classification framework. MobileNetV3 adopts a lightweight network design with efficient computation and low memory consumption. Compared with other networks, it has fewer parameters and can perform inference faster on mobile devices. It introduces a new block structure and combines the squeeze-and-excitation (SE) module with the new activation function H-swish.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1637241-g008.tif">
<alt-text content-type="machine-generated">Flowchart depicting a deep learning process for plant disease detection. It includes two stages. Stage 1 involves Conv2d, Batch Normalization, SE, DCN, and H-swish techniques. Stage 2 includes additional Conv2d layers with self-attention and additive attention mechanisms. The input is leaf images showing diseases like brown spot and root rot. The output results show classification probabilities for each disease, such as brown spot at 95.2% and root rot at 98.8%.</alt-text>
</graphic>
</fig>
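<p>The SE module and H-swish activation named in <xref ref-type="fig" rid="f8"><bold>Figure&#xa0;8</bold></xref> can be sketched in generic form as follows (the channel sizes are illustrative and do not reflect the exact configuration of our framework).</p>
<preformat>
# Sketch of the squeeze-and-excitation (SE) module and H-swish activation
# used in MobileNetV3-style blocks (generic form; sizes are illustrative).
import torch
import torch.nn as nn

def h_swish(x):
    # H-swish: x * ReLU6(x + 3) / 6, a cheap smooth activation for mobile nets.
    return x * torch.nn.functional.relu6(x + 3.0) / 6.0

class SEBlock(nn.Module):
    def __init__(self, channels, reduction=4):
        super().__init__()
        self.squeeze = nn.AdaptiveAvgPool2d(1)          # global average pool
        self.excite = nn.Sequential(
            nn.Linear(channels, channels // reduction),
            nn.ReLU(inplace=True),
            nn.Linear(channels // reduction, channels),
            nn.Hardsigmoid(),                            # gate in [0, 1]
        )

    def forward(self, x):
        b, c, _, _ = x.shape
        weights = self.excite(self.squeeze(x).view(b, c)).view(b, c, 1, 1)
        return x * weights                               # channel-wise re-weighting

x = torch.randn(1, 40, 28, 28)
print(h_swish(SEBlock(40)(x)).shape)   # torch.Size([1, 40, 28, 28])
</preformat>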
<p>In summary, although lightweight networks have shown great potential in plant disease detection in complex natural scenes, further research and improvement are needed in terms of model performance balance, universality, experimental comparison, and interpretability to achieve their widespread application in intelligent agriculture.</p>
</sec>
<sec id="s4_3">
<label>4.3</label>
<title>Synchronous detection and recognition of plant leaf diseases</title>
<p>Plant leaf disease detection and identification can also be done synchronously, that is, by simultaneously detecting the disease area and identifying the disease type, in addition to the sequential process of first identifying the disease and then detecting the disease area (i.e., the region of interest). A Siamese neural network, an effective weakly supervised model for agricultural disease localization, was studied by <xref ref-type="bibr" rid="B17">Chen J. Y.  et al., 2024</xref>. The model innovatively adopts a twin network structure with a weight-sharing mechanism. According to the results, ADPL-CAM performs best across all network frameworks, with an accuracy 27.09% higher than GradCAM and 19.63% higher than SmoothCAM; the accuracy of ADPL-CAM is 54.29%. It can accurately and promptly identify and locate leaf diseases in crops. <xref ref-type="bibr" rid="B120">Wu et&#xa0;al. (2025)</xref> offered the LBPAttNet model, which incorporates a lightweight coordinate attention mechanism into ResNet18 and further extracts the local structure and texture features of tea diseases with a local binary pattern algorithm to obtain a more comprehensive feature representation. The model achieves the highest accuracy of 98.31% on an open tea dataset, outperforming traditional algorithms such as AlexNet, GoogLeNet, and MobileNet.</p>
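<p>A generic class activation map (CAM) sketch conveys the weakly supervised localization idea behind the CAM-family methods compared above (it is not the ADPL-CAM algorithm itself; shapes and names are illustrative): the final convolutional feature maps are weighted by the classifier weights of the predicted class and normalized into a heatmap that highlights the lesion region.</p>
<preformat>
# Generic class-activation-map (CAM) sketch of the weakly supervised
# localization idea behind the CAM-family methods compared above
# (not the ADPL-CAM algorithm itself; shapes and names are illustrative).
import torch

def class_activation_map(feature_maps, fc_weight, class_idx):
    """feature_maps: (C, H, W) from the last conv layer;
    fc_weight: (num_classes, C) weights of the final linear classifier."""
    weights = fc_weight[class_idx]                       # (C,)
    cam = torch.einsum("c,chw->hw", weights, feature_maps)
    cam = torch.relu(cam)                                # keep positive evidence
    return cam / (cam.max() + 1e-8)                      # normalize to [0, 1]

feats = torch.randn(512, 14, 14)       # hypothetical backbone activations
fc_w = torch.randn(10, 512)            # hypothetical 10-class classifier weights
heatmap = class_activation_map(feats, fc_w, class_idx=3)
print(heatmap.shape)                   # torch.Size([14, 14]); upsample to image size
</preformat>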
<p>
<xref ref-type="bibr" rid="B130">Zhang D.Y. et&#xa0;al. (2023)</xref> used a ResNet-50 backbone network to detect tomato leaf disease degree, outperforming the most sophisticated technique with an accuracy of 95.03%. The model is competitive in tomato disease severity and offers a novel approach to determining plant leaf disease severity. <xref ref-type="bibr" rid="B12">Bhatti et&#xa0;al. (2023)</xref> developed a new system for recognizing plant leaf diseases using Inceptionv3. They trained the model on a dataset of 80,848 photos, reaching an accuracy of 99%, and provided advice for conquering specific diseases.</p>
<p>Researchers have offered numerous creative concepts and noteworthy advances in the identification of plant leaf diseases. From feature extraction techniques to the development of various model architectures, all have improved disease detection performance and accuracy to some degree. But beneath these successes lie a number of unresolved issues. On the one hand, it is unclear whether a model&#x2019;s generalization capacity can withstand the intricate and constantly shifting agricultural environment, which could result in performance degradation because of environmental variations and ambiguous early disease indications. On the other hand, certain complicated models need extensive computational resources, and in real-world agricultural production settings, large-scale deployment and application are hampered by hardware constraints and costs. Furthermore, data quality and diversity challenges persist, and data collection difficulties and annotation errors may reduce model training efficacy; the data improvement measures currently used by researchers are insufficiently comprehensive. To increase the practical use of plant leaf disease detection and recognition technology, continued progress in model generalization, resource consumption reduction, and data processing optimization is required.</p>
</sec>
<sec id="s4_4">
<label>4.4</label>
<title>Analysis and prospect of plant leaf disease identification</title>
<p>Convolutional neural network architectures can be categorized as either deep or lightweight based on their parameter magnitude. <xref ref-type="table" rid="T5">
<bold>Tables&#xa0;5</bold>
</xref>, <xref ref-type="table" rid="T6">
<bold>6</bold>
</xref> systematically sort out the latest research achievements in the field of intelligent recognition of plant diseases.</p>
<table-wrap id="T5" position="float">
<label>Table&#xa0;5</label>
<caption>
<p>Plant leaf disease recognition based on deep neural network.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">Authors, Years</th>
<th valign="middle" align="left">Class</th>
<th valign="middle" align="left">Total</th>
<th valign="middle" align="left">Collect ways</th>
<th valign="middle" align="left">Methods</th>
<th valign="middle" align="left">Performance</th>
<th valign="middle" align="left">DOI</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B117">Wang et&#xa0;al. (2022)</xref>
</td>
<td valign="middle" align="left">Cucumber</td>
<td valign="middle" align="left">4740</td>
<td valign="middle" align="left">Field shooting</td>
<td valign="middle" align="left">Improved SwinT</td>
<td valign="middle" align="left">Accuracy=98.97%<break/>Model Size =186.17m<break/>GFLOPs=8.78<break/>Inference Time (ms)=71<break/>training cost(s)=139.03</td>
<td valign="middle" align="left">10.1016/J.COMPAG.2022.107163</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B86">Obsie et&#xa0;al. (2022)</xref>
</td>
<td valign="middle" align="left">Blueberry</td>
<td valign="middle" align="left">1661</td>
<td valign="middle" align="left">Field shooting</td>
<td valign="middle" align="left">Yolov5s-CA</td>
<td valign="middle" align="left">Accuracy=96.30%<break/>Precision=75.2%<break/>Recall=61.2%<break/>Model Size=13.8MB<break/>Inference Time=11.85%<break/>mAP@50 = 68.2%</td>
<td valign="middle" align="left">10.3390/AGRICULTURE13010078</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B6">Andrew et&#xa0;al. (2022)</xref>
</td>
<td valign="middle" align="left">Apple</td>
<td valign="middle" align="left">54305</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">DenseNet-121</td>
<td valign="middle" align="left">Accuracy=99.81%<break/>F1-score=0.998<break/>Model Size=7.05M</td>
<td valign="middle" align="left">10.3390/agronomy12102395</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B52">Khasawneh et&#xa0;al. (2022)</xref>
</td>
<td valign="middle" align="left">Tomato</td>
<td valign="middle" align="left">18160</td>
<td valign="middle" align="left">Access via<break/>the Internet</td>
<td valign="middle" align="left">Transfer Learning</td>
<td valign="middle" align="left">Accuracy=99.40%</td>
<td valign="middle" align="left">10.3390/APP 12178467</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B126">Yin et&#xa0;al. (2022)</xref>
</td>
<td valign="middle" align="left">Grape</td>
<td valign="middle" align="left">4344</td>
<td valign="middle" align="left">ImageNet<break/>dataset</td>
<td valign="middle" align="left">GLD-DTL</td>
<td valign="middle" align="left">Accuracy=99.84%<break/>Precision=0.995<break/>Recall=0.995<break/>F1-score=0.995<break/>Model Size=30MB<break/>training cost(m)=1.15</td>
<td valign="middle" align="left">10.25165/j.ijabe.20221503.7062</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B7">Arun et&#xa0;al. (2022)</xref>
</td>
<td valign="middle" align="left">Grape</td>
<td valign="middle" align="left">240008</td>
<td valign="middle" align="left">Access via<break/>the Internet</td>
<td valign="middle" align="left">Conv-5 DCNN</td>
<td valign="middle" align="left">Accuracy=98.41%<break/>Precision=0.94<break/>Recall=1<break/>F1-score=0.97<break/>training cost=1000 epochs</td>
<td valign="middle" align="left">10.3390/ELECTRONICS11081266</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B128">Yu et&#xa0;al. (2022)</xref>
</td>
<td valign="middle" align="left">Soybean</td>
<td valign="middle" align="left">39446</td>
<td valign="middle" align="left">Field shooting</td>
<td valign="middle" align="left">RANet</td>
<td valign="middle" align="left">Accuracy=98.49%<break/>F1-score=98.52%<break/>Precision=98.50%<break/>Recall=98.49%<break/>Model Size=42.75MB<break/>Inference Time=0.0514</td>
<td valign="middle" align="left">10.3389/FPLS.2022.878834</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B138">Zhao Y. F. et&#xa0;al. (2022)</xref>
</td>
<td valign="middle" align="left">Tomato</td>
<td valign="middle" align="left">31361</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">DoubleGAN</td>
<td valign="middle" align="left">Accuracy=99.80%</td>
<td valign="middle" align="left">10.1109/TCBB.2021.3056683</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B91">Prabhjot et&#xa0;al. (2022)</xref>
</td>
<td valign="middle" align="left">Grape</td>
<td valign="middle" align="left">9027</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">EfficienNet B7</td>
<td valign="middle" align="left">Precision=99.80%<break/>Accuracy=98.70%<break/>Recall=99.00%<break/>F1-score=97.00%</td>
<td valign="middle" align="left">10.3390/S22020575</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B30">Fan et&#xa0;al. (2022)</xref>
</td>
<td valign="middle" align="left">Apple</td>
<td valign="middle" align="left">3568</td>
<td valign="middle" align="left">Apple Leaf</td>
<td valign="middle" align="left">InceptionV3</td>
<td valign="middle" align="left">Accuracy=98.46%</td>
<td valign="middle" align="left">10.1016/J.COMPAG.2022.106892</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B84">Nag et&#xa0;al. (2023)</xref>
</td>
<td valign="middle" align="left">Tomato</td>
<td valign="middle" align="left">18160</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">Improved CNNs</td>
<td valign="middle" align="left">Accuracy=99.85%</td>
<td valign="middle" align="left">10.1016/J.COMPELECENG.2023.108995</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B137">Zhang Y. K. et al. (2023)</xref>
</td>
<td valign="middle" align="left">Apple</td>
<td valign="middle" align="left">10000</td>
<td valign="middle" align="left">Field shooting</td>
<td valign="middle" align="left">BCTNet</td>
<td valign="middle" align="left">Accuracy=85.23%<break/>Recall=78.97%<break/>Model Size=79.04M<break/>training cost=300 epochs<break/>Inference Time=33FPS<break/>mAP=85.23%<break/>Map@50 = 90.65%</td>
<td valign="middle" align="left">10.1016/J.COMPAG.2023.108132</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B121">Xu et&#xa0;al. (2023)</xref>
</td>
<td valign="middle" align="left">Wheat</td>
<td valign="middle" align="left">7239</td>
<td valign="middle" align="left">Field shooting</td>
<td valign="middle" align="left">RFE-CNN</td>
<td valign="middle" align="left">Accuracy=99.95%<break/>Precision=99.73%<break/>Recall=98.24%<break/>Model Size=1.235M<break/>training cost=900 epochs<break/>Inference Time=1.5s/image</td>
<td valign="middle" align="left">10.1016/J.PMPP.2022.101940</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B98">Saidani et&#xa0;al. (2023)</xref>
</td>
<td valign="middle" align="left">Rice</td>
<td valign="middle" align="left">20000</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">Based on PlantNet</td>
<td valign="middle" align="left">Accuracy=97%<break/>Model Size=2.47M<break/>Inference Time=0.04s</td>
<td valign="middle" align="left">10.31577/cai_2023_6_1378</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B26">Daniya and Vigneshwari (2023)</xref>
</td>
<td valign="middle" align="left">Rice</td>
<td valign="middle" align="left">6213</td>
<td valign="middle" align="left">Rice disease<break/>dataset</td>
<td valign="middle" align="left">RWW-NN</td>
<td valign="middle" align="left">Accuracy=90.7%</td>
<td valign="middle" align="left">10.1016/J.ADVEN-GSOFT.2023.103472</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B22">Cui and Tian (2023)</xref>
</td>
<td valign="middle" align="left">Rice</td>
<td valign="middle" align="left">2500</td>
<td valign="middle" align="left">Field shooting</td>
<td valign="middle" align="left">YOLO v3</td>
<td valign="middle" align="left">Accuracy=91.84%<break/>Precision=91.12%<break/>Recall=91.84%<break/>F1-score=91.87%<break/>training cost=1500 epochs<break/>mAP=86.72%</td>
<td valign="middle" align="left">10.3390/AGRICULTURE13010170</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B15">Buchke and Mayuri, 2024</xref>
</td>
<td valign="middle" align="left">Tomato</td>
<td valign="middle" align="left">21000</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">EfficientNett</td>
<td valign="middle" align="left">Accuracy=99.50%<break/>Precision=0.9950<break/>Recall=0.9950<break/>F1-score=0.9950</td>
<td valign="middle" align="left">10.21203/rs.3.rs-3149045/v1</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B134">Zhang R. F. et&#xa0;al. (2023)</xref>
</td>
<td valign="middle" align="left">Tomato</td>
<td valign="middle" align="left">16453</td>
<td valign="middle" align="left">PDDA,<break/>PlantVillage</td>
<td valign="middle" align="left">IBSA_Net</td>
<td valign="middle" align="left">Accuracy=99.70%<break/>Precision=0.989<break/>Recall=0.993<break/>F1-score=0.991</td>
<td valign="middle" align="left">10.3390/APP13074348</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B44">Hu B. et al. (2023)</xref>
</td>
<td valign="middle" align="left">Apple</td>
<td valign="middle" align="left">54303</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">FOTCA</td>
<td valign="middle" align="left">Accuracy=99.80%<break/>F1-score=0.9931<break/>Model Size=59.14M<break/>training cost=11 epochs</td>
<td valign="middle" align="left">10.3389/FPLS.2023.1231903</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B60">Liang and Jiang (2023)</xref>
</td>
<td valign="middle" align="left">Tomato</td>
<td valign="middle" align="left">22930</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">ResNet 50-DPA</td>
<td valign="middle" align="left">Accuracy=99.28%<break/>Precision=99.29%<break/>Recall=99.28%<break/>F1-score=99.28%<break/>training cost=200 epochs</td>
<td valign="middle" align="left">10.3389/FPLS.2023. 1258658</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B72">Liu et&#xa0;al. (2023)</xref>
</td>
<td valign="middle" align="left">Apple</td>
<td valign="middle" align="left">3171</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">Inception-V3</td>
<td valign="middle" align="left">Accuracy=99.45%<break/>Precision=99.84%<break/>Recall=99.10%<break/>F1-score=99.00%</td>
<td valign="middle" align="left">10.1109/TCBB.2022.3195 291.</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B81">Md et&#xa0;al. (2023)</xref>
</td>
<td valign="middle" align="left">Rice</td>
<td valign="middle" align="left">3710</td>
<td valign="middle" align="left">Rice Leaf<break/>dataset</td>
<td valign="middle" align="left">PlantDet</td>
<td valign="middle" align="left">Accuracy=98.53%<break/>Precision=98.50%<break/>Recall=98.35%<break/>F1-score=98.42%</td>
<td valign="middle" align="left">10.1109/ACCESS.2023.3264835</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B21">Cui et&#xa0;al. (2023)</xref>
</td>
<td valign="middle" align="left">Tree</td>
<td valign="middle" align="left">259800</td>
<td valign="middle" align="left">Field shot</td>
<td valign="middle" align="left">DINO-ViT</td>
<td valign="middle" align="left">Accuracy=96.95%</td>
<td valign="middle" align="left">10.3390/PLANTS12183280</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B118">Wang B. B. et al. (2023)</xref>
</td>
<td valign="middle" align="left">Apple</td>
<td valign="middle" align="left">54306</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">ULEN</td>
<td valign="middle" align="left">Accuracy=98.13%<break/>Precision=98.13%<break/>Recall=97.49%<break/>F1-score=97.76%<break/>Model Size=111758P<break/>Inference Time=0.037s</td>
<td valign="middle" align="left">10.1007/s11119-023-10020-0</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B72">Liu and Zhang (2022)</xref>
</td>
<td valign="middle" align="left">Grape</td>
<td valign="middle" align="left">2056</td>
<td valign="middle" align="left">Field shooting</td>
<td valign="middle" align="left">GLDCNet</td>
<td valign="middle" align="left">Accuracy=99.57%<break/>Precision=98.48%<break/>Recall=98.49%<break/>F1-score=98.99%</td>
<td valign="middle" align="left">10.1016/J.COMPAG.2024.108668</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B116">Wang H. M. et al. (2024)</xref>
</td>
<td valign="middle" align="left">Maize</td>
<td valign="middle" align="left">3686</td>
<td valign="middle" align="left">Field Shot</td>
<td valign="middle" align="left">TC-MRSN</td>
<td valign="middle" align="left">Accuracy=99.59%<break/>Precision=94.88%<break/>Recall=93.21%<break/>F1-score=93.52%<break/>Model Size=5M</td>
<td valign="middle" align="left">10.1016/J.COMPAG.2024.108915</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B38">Hatice and Veysel (2024)</xref>
</td>
<td valign="middle" align="left">Potato</td>
<td valign="middle" align="left">1500</td>
<td valign="middle" align="left">Muhammad</td>
<td valign="middle" align="left">MDSCIRnet</td>
<td valign="middle" align="left">Accuracy=99.33%</td>
<td valign="middle" align="left">10.1016/j.engappai.2024.108307</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B31">Farian and Neema (2024)</xref>
</td>
<td valign="middle" align="left">Grape</td>
<td valign="middle" align="left">4040</td>
<td valign="middle" align="left">Access via<break/>the Internet</td>
<td valign="middle" align="left">CNN+RF</td>
<td valign="middle" align="left">Accuracy=95.34%<break/>Recall=99.00%<break/>F1-score=99.00%</td>
<td valign="middle" align="left">10.1016/J.HELIYON.2024.E33377</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B92">Qiu et&#xa0;al. (2024)</xref>
</td>
<td valign="middle" align="left">Tomato</td>
<td valign="middle" align="left">18363</td>
<td valign="middle" align="left">Access via<break/>the Internet</td>
<td valign="middle" align="left">Improved<break/>AlexNet</td>
<td valign="middle" align="left">Accuracy=98.83%<break/>Precision=99.77%<break/>Recall=99.15%<break/>F1-score=99.40%</td>
<td valign="middle" align="left">10.1016/J.HELIYON.2024.E33555</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B51">Kasana and Rathore, 2024</xref>
</td>
<td valign="middle" align="left">Potato</td>
<td valign="middle" align="left">7101</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">XceptionNet</td>
<td valign="middle" align="left">Accuracy=97.25%</td>
<td valign="middle" align="left">10.3390/APP 14178038</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B136">Zhang E. X. et&#xa0;al. (2024)</xref>
</td>
<td valign="middle" align="left">Tomato</td>
<td valign="middle" align="left">50000</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">YOLO v4</td>
<td valign="middle" align="left">Accuracy=98.17%<break/>Precision=98.73%<break/>Recall=98.69%<break/>F1-score=98.71%<break/>Model Size=0.91M</td>
<td valign="middle" align="left">10.3389/FPLS.2024.1420584</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B89">Pan et&#xa0;al. (2024b)</xref>
</td>
<td valign="middle" align="left">Rice</td>
<td valign="middle" align="left">60235</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">Mobile Net V2</td>
<td valign="middle" align="left">Accuracy=99.16%<break/>Model Size=5.79MB</td>
<td valign="middle" align="left">10.1109/ACCESS.2024.3444796</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B43">Hicham et&#xa0;al. (2024)</xref>
</td>
<td valign="middle" align="left">Bean</td>
<td valign="middle" align="left">3296</td>
<td valign="middle" align="left">Field shooting</td>
<td valign="middle" align="left">YOLO-NASM</td>
<td valign="middle" align="left">Accuracy=88.80%</td>
<td valign="middle" align="left">10.3233/JIFS-23615<break/>4</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B93">Rashid et&#xa0;al. (2025)</xref>
</td>
<td valign="middle" align="left">Tomato</td>
<td valign="middle" align="left">18835</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">MobileNetV3</td>
<td valign="middle" align="left">Accuracy=98.77%<break/>Precision=99.00%<break/>Recall=99.00%<break/>F1-score=99.00%<break/>training cost=4h</td>
<td valign="middle" align="left">10.1109/ACCESS.2025.3550205</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B142">Zhu et&#xa0;al. (2025a)</xref>
</td>
<td valign="middle" align="left">Citrus</td>
<td valign="middle" align="left">2283</td>
<td valign="middle" align="left">CCL&#x2019;2</td>
<td valign="middle" align="left">YOLOv5</td>
<td valign="middle" align="left">Accuracy=96.1%<break/>Precision=90.4%<break/>Recall=90.90%<break/>Model Size=14.3MB<break/>Inference Time=61.77FPS<break/>mAP@0.5 = 92.1%</td>
<td valign="middle" align="left">10.3390/sym17040617</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B47">Islam et&#xa0;al. (2025)</xref>
</td>
<td valign="middle" align="left">Rice</td>
<td valign="middle" align="left">30000</td>
<td valign="middle" align="left">Online</td>
<td valign="middle" align="left">PlantCareNet</td>
<td valign="middle" align="left">Accuracy=97%<break/>Precision=97%<break/>Recall=97%<break/>F1-score=97%<break/>Model Size=19.2MB<break/>Inference Time=0.0021s/</td>
<td valign="middle" align="left">10.1186/s13007-025-01366-9</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B54">Kumar and Bidarakundi (2025)</xref>
</td>
<td valign="middle" align="left">Coffee</td>
<td valign="middle" align="left">58555</td>
<td valign="middle" align="left">Arabica</td>
<td valign="middle" align="left">Coffee-Net</td>
<td valign="middle" align="left">Accuracy=99.95%<break/>Precision=97.36%<break/>Recall=97.25%<break/>F1-score=96.88%</td>
<td valign="middle" align="left">10.1109/ACCESS.2025.3525661</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B110">Tang et&#xa0;al. (2025)</xref>
</td>
<td valign="middle" align="left">Maize</td>
<td valign="middle" align="left">5796</td>
<td valign="middle" align="left">PlantVillage<break/>dataset</td>
<td valign="middle" align="left">ResNet50</td>
<td valign="middle" align="left">Accuracy=98.79%<break/>Precision=88.63%<break/>Recall=88.33%<break/>F1-score=88.28%</td>
<td valign="middle" align="left">10.1109/ACCESS.2025.3525661</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B32">Gaashani et&#xa0;al. (2025)</xref>
</td>
<td valign="middle" align="left">Maize</td>
<td valign="middle" align="left">4188</td>
<td valign="middle" align="left">Online</td>
<td valign="middle" align="left">MSCPNet</td>
<td valign="middle" align="left">Accuracy=97.44%<break/>Precision=96.76%<break/>Recall=97.37%<break/>F1-score=97.04%<break/>Inference Time=0.0111s</td>
<td valign="middle" align="left">10.1109/ACCESS.2024.3524729</td>
</tr>
<tr>
<td valign="middle" align="left">
<xref ref-type="bibr" rid="B94">Raza et&#xa0;al. (2025)</xref>
</td>
<td valign="middle" align="left">Potato</td>
<td valign="middle" align="left">3261</td>
<td valign="middle" align="left">Online</td>
<td valign="middle" align="left">DENSE-NET-121</td>
<td valign="middle" align="left">Accuracy=99.08%<break/>Precision=98.00%<break/>Recall=96.00%<break/>F1-score=97.00%</td>
<td valign="middle" align="left">10.1016/j.heliyon.2025.e42318</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="T6" position="float">
<label>Table&#xa0;6</label>
<caption>
<p>Plant leaf disease recognition based on lightweight network.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Authors, Years</th>
<th valign="top" align="left">Class</th>
<th valign="top" align="left">Total</th>
<th valign="top" align="left">Collect ways</th>
<th valign="top" align="left">Methods</th>
<th valign="top" align="left">Performer</th>
<th valign="top" align="left">DOI</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B63">Liu S. et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Apple</td>
<td valign="top" align="left">700</td>
<td valign="top" align="left">Field shooting</td>
<td valign="top" align="left">Improved MobileNetV2</td>
<td valign="top" align="left">Accuracy=0.96<break/>Precision=0.97<break/>recall=0.92<break/>F1-score=0.93</td>
<td valign="top" align="left">10.1590/fst.104322.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B97">Sabbir et&#xa0;al. (2022)</xref>
</td>
<td valign="top" align="left">Aplle</td>
<td valign="top" align="left">54309</td>
<td valign="top" align="left">PlantVillage</td>
<td valign="top" align="left">MobileNetV 2</td>
<td valign="top" align="left">Accuracy=0.99</td>
<td valign="top" align="left">10.1109/ACCESS.2022.3187203.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B75">Lu et&#xa0;al. (2022)</xref>
</td>
<td valign="top" align="left">Maize</td>
<td valign="top" align="left">21967</td>
<td valign="top" align="left">Internet</td>
<td valign="top" align="left">ECA-ShuffleNetV2</td>
<td valign="top" align="left">Accuracy=0.96<break/>Precision=0.94<break/>recall=0.95<break/>F1-score=0.94</td>
<td valign="top" align="left">10.3390/AGRICULT- URE12111929.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B99">Samia et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Apple</td>
<td valign="top" align="left">61486</td>
<td valign="top" align="left">PlantVillage</td>
<td valign="top" align="left">MULTINET</td>
<td valign="top" align="left">Accuracy=0.63<break/>Precision=0.65<break/>F1-score=0.58</td>
<td valign="top" align="left">10.1109/ACCESS.2023.330 3868.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B50">Kang et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Maize</td>
<td valign="top" align="left">2775</td>
<td valign="top" align="left">Field shooting</td>
<td valign="top" align="left">CenterNet</td>
<td valign="top" align="left">Accuracy=0.85<break/>Precision=0.91<break/>recall=0.61<break/>F1-score=0.69<break/>mAP=0.85</td>
<td valign="top" align="left">10.3390/APP131810441.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B23">Dai G. W. et&#xa0;al. (2023)</xref>
</td>
<td valign="top" align="left">Leaf</td>
<td valign="top" align="left">4503</td>
<td valign="top" align="left">open data sets</td>
<td valign="top" align="left">PPLC-Net</td>
<td valign="top" align="left">Accuracy=0.99</td>
<td valign="top" align="left">10.1016/J.JKSUCI.2023.101555.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B131">Zhang R. et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Rice</td>
<td valign="top" align="left">3357</td>
<td valign="top" align="left">Field shooting</td>
<td valign="top" align="left">YOLO-CRD</td>
<td valign="top" align="left">Accuracy=0.90</td>
<td valign="top" align="left">10.32604/PHYTON.2024.052397.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B19">Cheng et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Rice</td>
<td valign="top" align="left">1500</td>
<td valign="top" align="left">Field shots</td>
<td valign="top" align="left">D-R-C-YOLOv7-Tiny</td>
<td valign="top" align="left">Accuracy=0.92<break/>Precision=0.92<break/>recall=0.82<break/>F1-score=0.87<break/>mAP=0.92</td>
<td valign="top" align="left">10.3390/AGRICULTURE1405 0709.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B102">Shwetha et&#xa0;al. (2024)</xref>
</td>
<td valign="top" align="left">Jasmine</td>
<td valign="top" align="left">2000</td>
<td valign="top" align="left">Field shooting</td>
<td valign="top" align="left">LeafSpotNet</td>
<td valign="top" align="left">Accuracy=0.97<break/>Precision=0.94</td>
<td valign="top" align="left">10.1016/J.AIIA.2024.02.002.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B41">He and Tong, 2025</xref>
</td>
<td valign="top" align="left">Tomato</td>
<td valign="top" align="left">3641</td>
<td valign="top" align="left">Online</td>
<td valign="top" align="left">LT-YOLO</td>
<td valign="top" align="left">Accuracy=0.90</td>
<td valign="top" align="left">10.326 04/cmc.2025.060550.</td>
</tr>
<tr>
<td valign="top" align="left">
<xref ref-type="bibr" rid="B109">Sun et&#xa0;al. (2025)</xref>
</td>
<td valign="top" align="left">Tomato</td>
<td valign="top" align="left">800</td>
<td valign="top" align="left">Camera</td>
<td valign="top" align="left">Faster RCNN</td>
<td valign="top" align="left">Accuracy=0.97<break/>recall=0.85</td>
<td valign="top" align="left">10.3389/fpls.2024.1491593.</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Existing plant disease image databases (both proprietary and open-source) exhibit a notable divergence in spatial distribution. In contrast to the variable scenes faced by agricultural plant protection robots in field operations (e.g., light fluctuation, branch occlusion, and dynamic shooting angles), the current research paradigm focuses mainly on controlled, human-operated acquisition (such as standardized shooting distances and fixed viewing angles), as evidenced by disease target areas that are concentrated in the center of the image and occupy more than 60% of it. Systematic experimental analysis reveals that the recognition accuracy of convolutional neural networks optimized on a specific dataset (e.g., ResNet-50 with 98.2% accuracy on PlantVillage) can drop by 12%-15% when migrated to cross-domain disease image libraries, emphasizing the importance of adaptive model architecture selection for different application scenarios. Although some studies have investigated mobile disease identification (e.g., lightweight deployment based on TensorFlow Lite), inference latency is typically greater than 200 milliseconds (EfficientNet-B0 takes approximately 340 milliseconds per frame under the MXNet framework), making it difficult to meet the millisecond-level response requirements of real-time plant protection decisions.</p>
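<p>As a minimal illustration of how such per-frame latency figures are obtained, the following Python sketch times repeated invocations of a TensorFlow Lite classifier; the model file name is a hypothetical placeholder, and the averaging loop only reflects common benchmarking practice rather than the protocol of any specific study cited above.</p>
<preformat>
# Minimal latency benchmark for a TensorFlow Lite classifier (sketch).
# "plant_disease.tflite" is a hypothetical float32 model file; the timing loop
# only illustrates how per-frame inference delays such as those quoted above
# are commonly measured.
import time
import numpy as np
import tensorflow as tf

interpreter = tf.lite.Interpreter(model_path="plant_disease.tflite")
interpreter.allocate_tensors()
inp = interpreter.get_input_details()[0]
out = interpreter.get_output_details()[0]

# Dummy frame matching the model's expected input shape (e.g. 1x224x224x3).
frame = np.random.rand(*inp["shape"]).astype(np.float32)

interpreter.set_tensor(inp["index"], frame)   # warm-up run
interpreter.invoke()

runs = 50
start = time.perf_counter()
for _ in range(runs):
    interpreter.set_tensor(inp["index"], frame)
    interpreter.invoke()
    _ = interpreter.get_tensor(out["index"])
latency_ms = (time.perf_counter() - start) / runs * 1000.0
print(f"mean per-frame latency: {latency_ms:.1f} ms")
</preformat>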
<p>Both the research focus and the choice of network models in deep learning applications to plant disease recognition are changing markedly. In terms of research content, the focus has shifted from improving the disease recognition accuracy of convolutional neural networks to optimizing operational efficiency. The research scene has also shifted from a single laboratory background to disease recognition against complex backgrounds in the natural environment, and the detection method has expanded from static image analysis to dynamic video surveillance. At the same time, the scope of disease feature extraction has been broadened from single-leaf disease to multiple plant organs, including root, stem, leaf, flower, and fruit. In terms of network models, researchers are moving from traditional networks such as VGG, GoogLeNet, ResNet, and DenseNet to lighter structures such as MobileNet and EfficientNet. This shift aims to reduce model parameters and speed up inference without sacrificing model performance, in order to meet the needs of artificial intelligence (AI) edge computing platforms with limited computing resources.</p>
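<p>The scale of this shift can be illustrated with a short Python sketch that instantiates several Keras reference architectures (without downloading weights) and prints their parameter counts; the figures are indicative of the backbone families named above rather than of any particular study reviewed here.</p>
<preformat>
# Rough parameter-count comparison illustrating the shift from classical
# backbones to lightweight ones (sketch; weights=None so nothing is
# downloaded, only the architectures are instantiated).
import tensorflow as tf

models = {
    "VGG16": tf.keras.applications.VGG16(weights=None),
    "ResNet50": tf.keras.applications.ResNet50(weights=None),
    "MobileNetV2": tf.keras.applications.MobileNetV2(weights=None),
    "EfficientNetB0": tf.keras.applications.EfficientNetB0(weights=None),
}
for name, m in models.items():
    print(f"{name:>15}: {m.count_params() / 1e6:.1f} M parameters")
</preformat>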
</sec>
</sec>
<sec id="s5">
<label>5</label>
<title>Synthesis</title>
<p>When using deep learning for plant pathology research in various agricultural settings, it is vital to select technical solutions flexibly based on individual circumstances and needs, and to maximize practical benefit by combining cutting-edge technologies. Given ample computational resources and stringent recognition accuracy requirements, two-stage detection models should be preferred for high-precision analysis scenarios in the laboratory. By decoupling the region proposal network from the classification branch, high recognition accuracy can be achieved on typical datasets. However, this comes at the cost of high model complexity and slow inference, making two-stage detectors more appropriate as core tools for disease mechanism research and precise diagnosis. Real-time monitoring scenarios in the field require a compromise between detection speed and adaptability to dynamic environments. Most detectors used there are either one-stage (YOLOv8, optimized SSD variants) or anchor-free (CenterNet). The former considerably improves inference efficiency through end-to-end prediction; the latter replaces anchor box generation with keypoint estimation, ensuring strong robustness under occlusion and in complex backgrounds. Both can support the real-time processing requirements of mobile field devices. Depthwise separable convolution and channel pruning can be used to compress model parameters to fewer than 5 million, adapting to the computational limits of low-power devices. In addition, the weakly supervised localization recognition method achieves lesion localization from image-level labels, further reducing annotation costs and providing a feasible solution for model deployment in resource-limited areas.</p>
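<p>A minimal sketch of the parameter saving offered by depthwise separable convolution is given below, assuming an illustrative 56&#xd7;56&#xd7;128 feature map and a 3&#xd7;3 kernel; the layer sizes are arbitrary and serve only to show the order of magnitude of the compression.</p>
<preformat>
# Sketch of the parameter saving from depthwise separable convolution,
# one of the compression techniques mentioned above (layer sizes illustrative).
import tensorflow as tf

inputs = tf.keras.Input(shape=(56, 56, 128))
standard = tf.keras.layers.Conv2D(256, 3, padding="same")(inputs)
separable = tf.keras.layers.SeparableConv2D(256, 3, padding="same")(inputs)

std_params = tf.keras.Model(inputs, standard).count_params()
sep_params = tf.keras.Model(inputs, separable).count_params()
print(f"standard 3x3 conv : {std_params:,} parameters")
print(f"separable 3x3 conv: {sep_params:,} parameters "
      f"({sep_params / std_params:.1%} of standard)")
</preformat>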
<p>Future technological integration can concentrate on three key directions. First, introduce the Transformer self-attention mechanism, which dynamically captures the long-range dependencies of disease regions and improves detection stability in scenarios with overlapping leaves and uneven lighting. Second, develop semi-automatic annotation tools that prioritize high-value samples through self-learning strategies, lowering annotation costs by more than 60% while maintaining data quality. Third, use neural architecture search (NAS) to automatically generate heterogeneous network architectures for difficult field conditions such as multi-scale disease spots and background interference; a lightweight architecture that merges dilated convolution with attention modules via reinforcement learning search can optimize both accuracy and efficiency. These approaches can improve the applicability of deep learning models in agricultural contexts, driving plant pathology technology toward precision and universality.</p>
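<p>A minimal sketch of the first direction, applying multi-head self-attention over a sequence of feature-map patches so that distant lesion regions can exchange information, is shown below; the batch size, patch count, and embedding dimension are illustrative assumptions rather than values from any cited model.</p>
<preformat>
# Minimal self-attention sketch over feature-map patches (shapes illustrative).
import tensorflow as tf

batch, patches, dim = 2, 196, 256      # e.g. 14x14 patches from a CNN feature map
features = tf.random.normal((batch, patches, dim))

attn = tf.keras.layers.MultiHeadAttention(num_heads=8, key_dim=dim // 8)
# Self-attention: queries, keys and values all come from the same patch sequence,
# so each patch can aggregate information from distant lesion regions.
context = attn(query=features, value=features, key=features)
print(context.shape)  # (2, 196, 256)
</preformat>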
<p>The actual field setting presents numerous obstacles, including mutual occlusion of leaves, changes in sunlight, and complicated backgrounds such as soil and weeds, to which existing approaches are generally not very robust. Under severe occlusion or in an extremely complex background, the performance of both one-stage and two-stage detectors may degrade greatly. Anchor-free approaches, which rely on center points or keypoints, provide relatively consistent performance under occlusion. Because they do not learn precise spatial information, lightweight recognition networks and weakly supervised algorithms are especially susceptible to background interference. Improving adaptation to occlusion therefore strengthens model robustness in complicated, changing field conditions.</p>
<p>In terms of annotation cost, fully supervised object detection algorithms rely on large numbers of precisely annotated bounding boxes and category labels, so the annotation cost is high and the technology is difficult to scale. Lightweight recognition networks need only image-level labels such as &#x201c;diseased&#x201d; or &#x201c;healthy,&#x201d; together with the disease category, which considerably reduces the annotation burden and makes them more feasible for large-scale classification systems. In scenarios where annotation resources are severely restricted, weakly supervised localization techniques offer an alternative by using image-level labels to locate lesions, although their localization accuracy and robustness are typically lower than those of fully supervised systems.</p>
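<p>The following conceptual sketch shows one common weakly supervised localization idea, a class activation map obtained by weighting the last convolutional feature maps with the classification weights of the predicted class; the tensors are random placeholders standing in for a trained network&#x2019;s activations and weights.</p>
<preformat>
# Conceptual class activation map (CAM) sketch: with only an image-level label,
# a lesion heatmap is obtained by weighting the last conv feature maps with the
# classification weights of the predicted class (shapes illustrative).
import numpy as np

feature_maps = np.random.rand(14, 14, 512)   # placeholder for last conv activations
class_weights = np.random.rand(512)          # placeholder dense weights of one class

cam = np.tensordot(feature_maps, class_weights, axes=([2], [0]))  # (14, 14)
cam = (cam - cam.min()) / (cam.max() - cam.min() + 1e-8)          # normalize to [0, 1]
# Thresholding the upsampled heatmap gives an approximate lesion region
# without any bounding-box annotation.
mask = cam > 0.6
print(mask.shape, mask.sum(), "high-activation cells")
</preformat>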
<p>At the model deployment level, although researchers have successfully deployed models on edge devices, there are significant differences in computing power, storage capacity, and power consumption among different edge devices. This poses a great challenge to the adaptation of the model. Some models that perform well on high-performance devices may not be able to run at all or run at extremely slow speeds on resource-limited devices, which cannot meet the needs of real-time detection. For example, deploying complex deep learning models on low-end agricultural sensor devices with extremely limited resources still faces technical challenges. The maintenance and update mechanism after model deployment is also incomplete. The agricultural production environment is constantly changing, and new diseases continue to emerge. The characteristics of the original disease may also change. If the model cannot be updated in a timely manner, its detection accuracy will gradually decrease. However, model updates often require the collection of large amounts of data and retraining of the model, which is costly, time-consuming, and difficult to implement quickly on agricultural sites.</p>
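<p>As an indication of how models are commonly adapted to resource-limited edge hardware, the sketch below converts a Keras model to TensorFlow Lite with default post-training optimization; the small network and the output path are stand-ins, and the settings are illustrative rather than a recipe taken from the studies reviewed here.</p>
<preformat>
# Sketch: post-training conversion of a Keras model to TensorFlow Lite with
# default optimizations (dynamic-range weight quantization), shrinking the
# on-device footprint. The tiny model below is a placeholder for a trained
# disease classifier.
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Input(shape=(224, 224, 3)),
    tf.keras.layers.Conv2D(8, 3, activation="relu"),
    tf.keras.layers.GlobalAveragePooling2D(),
    tf.keras.layers.Dense(10, activation="softmax"),
])

converter = tf.lite.TFLiteConverter.from_keras_model(model)
converter.optimizations = [tf.lite.Optimize.DEFAULT]   # weight quantization
tflite_bytes = converter.convert()

with open("plant_disease_quant.tflite", "wb") as f:    # hypothetical output path
    f.write(tflite_bytes)
print(f"converted model size: {len(tflite_bytes) / 1024:.1f} KB")
</preformat>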
<p>Performance trade-off is an unavoidable issue for deep learning models in agricultural disease detection. Accuracy, speed, and computational resource consumption are the three key indicators for measuring model performance. In practical applications, there is often a mutually restrictive relationship between them. Some models adopt complex network structures and a large number of parameters to improve detection accuracy. Although this enables the model to achieve high accuracy on specific datasets, it also brings problems of excessive computational resource consumption and slow detection speed. In agricultural fields, especially in large-scale farmland, it is necessary to quickly detect diseases in a large number of crops. Slow models cannot meet the actual production pace, which may lead to delayed detection and treatment of diseases, resulting in serious economic losses. On the contrary, in order to pursue detection speed and reduce computational resource consumption, some lightweight models have been proposed. However, these models often have insufficient accuracy and may result in false positives or false negatives. For example, in complex backgrounds or situations where early symptoms of diseases are not obvious, lightweight models may not be able to accurately identify diseases, which can affect disease prevention and control. How to find the optimal balance between accuracy, speed, and computational resource consumption is one of the urgent problems that deep learning needs to solve in agricultural disease detection.</p>
<p>The transferability of models across regions is another major challenge for the application of deep learning in agricultural disease detection. Climate, soil, and crop varieties differ across regions, and these differences lead to variations in the types, patterns, and symptoms of plant diseases. Currently, most deep learning models are trained and validated on datasets specific to a particular region. When applied to other regions, their performance often deteriorates significantly because the data distribution differs. For example, a rice disease detection model trained in a southern region may not accurately identify rice diseases unique to the north. In addition, the level of agricultural management and the planting patterns of different regions also affect the applicability of a model. Some regions adopt advanced agricultural technologies and management methods, so the crop growth environment is relatively stable and diseases occur in a relatively regular pattern; other regions may have outdated agricultural technology, complex and variable planting environments, and unpredictable disease occurrence. Applying the same model in different regions therefore requires substantial adjustment and optimization according to local conditions, increasing the difficulty and cost of model application.</p>
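<p>One routine mitigation is to fine-tune only the classification head of a source-region model on a small labeled set from the target region; the sketch below outlines this in Keras, where the target-region dataset, the number of disease classes, and the training call are hypothetical placeholders.</p>
<preformat>
# Hedged sketch of cross-region adaptation by fine-tuning: freeze a backbone
# trained on source-region data and retrain only the classification head on a
# small target-region set ("target_region_ds" is a hypothetical tf.data
# pipeline of labeled images from the new region).
import tensorflow as tf

backbone = tf.keras.applications.MobileNetV2(
    weights=None, include_top=False, pooling="avg", input_shape=(224, 224, 3))
backbone.trainable = False                      # keep source-domain features fixed

num_classes = 10                                # illustrative number of diseases
model = tf.keras.Sequential([
    backbone,
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(num_classes, activation="softmax"),
])
model.compile(optimizer=tf.keras.optimizers.Adam(1e-4),
              loss="sparse_categorical_crossentropy", metrics=["accuracy"])
# model.fit(target_region_ds, epochs=10)        # hypothetical target-region data
</preformat>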
<p>In conclusion, while deep learning has promising applications in agricultural disease detection, there are still many issues with model deployment, performance trade-offs, and model transferability between areas. Only through ongoing research and innovation to address these critical concerns will deep learning technology actually play a vital role in agricultural production and global food security.</p>
</sec>
<sec id="s6" sec-type="conclusions">
<label>6</label>
<title>Conclusions</title>
<p>With the help of deep learning technology, plant disease target detection and recognition has become a popular research topic worldwide. A substantial body of research has greatly increased the accuracy of plant disease identification and contributed significantly to technological progress in this field. However, in actual deployments these deep learning-based solutions encounter numerous challenges, and there is still a long way to go before the problem of plant disease detection and diagnosis is fully and efficiently solved.</p>
<p>Data acquisition challenges: Creating high-quality datasets takes a significant amount of time, labor, and money. Images covering diverse plant varieties, disease types, disease stages, and shooting conditions must be gathered during the dataset construction phase, and in the subsequent processing step, individuals with specialized knowledge must correctly label the plant disease status.</p>
<p>Data imbalance: Rare diseases that are difficult to collect yield far less data than common, easily collected ones, so deep learning models learn the rare classes insufficiently, which degrades plant disease recognition accuracy. One common mitigation is sketched below.</p>
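<p>A frequently used mitigation is to reweight the training loss in inverse proportion to class frequency; the short sketch below computes such weights from illustrative image counts and notes how they would typically be passed to a Keras training call.</p>
<preformat>
# Sketch of one common mitigation for class imbalance: inverse-frequency
# class weights passed to training (image counts are illustrative).
import numpy as np

counts = {"healthy": 5000, "common_blight": 3000, "rare_rust": 120}
total = sum(counts.values())
class_weight = {i: total / (len(counts) * n) for i, n in enumerate(counts.values())}
print(class_weight)   # rare classes get proportionally larger weights
# Keras-style usage (model and dataset are hypothetical):
# model.fit(train_ds, class_weight=class_weight)
</preformat>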
<p>Data noise interference: During the acquisition process, the image may be affected by external factors such as uneven natural lighting, a complex plant background, a poor shooting angle, and shooting height. These factors will interfere with the extraction and learning of plant disease features.</p>
<p>Computational resource requirements: Deep learning models have complex structures, and training and prediction require powerful computational resources. This increases the research threshold and research cost of plant disease identification.</p>
<p>Model optimization is difficult: Deep learning models have overfitting and underfitting problems, and hyperparameter adjustment requires a lot of experiments and rich experience. Problems such as vanishing or exploding gradients may also occur during model training, affecting the convergence and performance of the model.</p>
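<p>Two routine training safeguards, early stopping against overfitting and gradient-norm clipping against exploding gradients, can be configured in a few lines, as sketched below with Keras; the hyperparameter values are illustrative only.</p>
<preformat>
# Sketch of two routine training safeguards (values illustrative): early
# stopping limits overfitting, and gradient-norm clipping guards against
# exploding gradients; both plug into a standard Keras training loop.
import tensorflow as tf

early_stop = tf.keras.callbacks.EarlyStopping(
    monitor="val_loss",            # stop when validation loss stops improving
    patience=5,                    # tolerate 5 stagnant epochs
    restore_best_weights=True)

optimizer = tf.keras.optimizers.Adam(learning_rate=1e-3, clipnorm=1.0)

# Typical usage (model, train_ds and val_ds are hypothetical):
# model.compile(optimizer=optimizer, loss="sparse_categorical_crossentropy")
# model.fit(train_ds, validation_data=val_ds, epochs=100, callbacks=[early_stop])
</preformat>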
<p>Poor model interpretability: Because deep learning models are black boxes, it is difficult to understand intuitively the basis and process by which they judge plant diseases, which also complicates troubleshooting and optimization.</p>
<p>Environmental adaptability: The field environment is complex and changeable, with large variations in conditions such as light, temperature, and humidity. A model that performs well in the laboratory may see its performance decrease in actual field application.</p>
<p>High real-time requirements: Large-scale agricultural production requires real-time disease detection, which places high demands on model inference speed; some complex models struggle to satisfy these real-time requirements.</p>
<p>High deployment and maintenance costs: To deploy the model to the actual production environment, it is necessary to consider the compatibility of hardware equipment, network communication, data security, and other issues. Later model updates and maintenance also require professional technicians and resources.</p>
<p>Plant disease identification is entering a promising era of change. In the field of deep learning, new models and algorithms are constantly emerging, such as Transformers and lightweight neural networks. With their excellent feature extraction ability, deep learning methods can capture the subtle features of plant diseases, significantly improving the accuracy and reliability of recognition.</p>
<p>With the advancement of sensor technology, plant physiological and environmental factors can be obtained more conveniently and comprehensively during data collection, yielding higher-quality plant disease datasets. In addition, multi-source data fusion creates the conditions for building more accurate disease identification models. As agricultural modernization accelerates and the demand for precision agriculture grows, plant leaf disease identification, a crucial component of precision agriculture, has drawn attention from many quarters and been supported by national resources, which has accelerated the field&#x2019;s growth.</p>
<p>Deep learning-based plant leaf disease detection has also seen significant prospects as interdisciplinary collaboration has grown. Combining computer science, agronomy, biology, and other fields allows us to examine plant disease occurrence settings, disease development characteristics, transmission rules, and identification techniques from a variety of perspectives. Because computer science has strong algorithms and data processing capabilities, it offers effective technical support for disease detection. From the standpoint of plant development environment and cultivation management, agronomy offers valuable practical expertise and important hints. Biology reveals the pathogenic mechanism of pathogens and the defense mechanism of plants by examining biological disorders at the microscopic level. The technique for identifying plant diseases will be advanced from various perspectives thanks to this multidisciplinary study. We have good reason to think that accurate early warning, comprehensive real-time monitoring, and effective plant disease prevention and management will soon be achieved, safeguarding both sustainable agricultural development and global food security.</p>
</sec>
</body>
<back>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>LX: Software, Resources, Visualization, Writing &#x2013; review &amp; editing, Writing &#x2013; original draft, Formal Analysis, Validation, Methodology. ZM: Methodology, Visualization, Conceptualization, Validation, Formal Analysis, Software, Writing &#x2013; original draft, Writing &#x2013; review &amp; editing. JL: Methodology, Writing &#x2013; review &amp; editing, Software, Conceptualization, Formal Analysis, Validation, Writing &#x2013; original draft. XW: Writing &#x2013; review &amp; editing, Writing &#x2013; original draft, Validation, Software, Methodology, Visualization, Formal Analysis. YL: Validation, Formal Analysis, Writing &#x2013; original draft, Writing &#x2013; review &amp; editing, Software, Methodology. XD: Validation, Methodology, Writing &#x2013; review &amp; editing, Formal Analysis, Visualization, Software, Writing &#x2013; original draft. JZ: Project administration, Funding acquisition, Writing &#x2013; review &amp; editing, Formal Analysis, Supervision, Writing &#x2013; original draft, Methodology, Investigation.</p>
</sec>
<sec id="s8" sec-type="funding-information">
<title>Funding</title>
<p>The author(s) declare financial support was received for the research and/or publication of this article. This work is supported by Henan Province Philosophy and Social Science Education Strong Province Project (No. 2025JYQS0339), National Natural Science Foundation of China (No. 62172140), National Natural Science Foundation of China (No. 62176086), Henan Provincial Science and Technology Research Project (No. 242102210032), the Scientific Research Fund of Henan University of Urban Construction (No. K-Q2025004), and Henan Provincial Higher Education Teaching Reform Research and Practice Project under Grant (No. 2024SJGLX0475).</p>
</sec>
<ack>
<title>Acknowledgments</title>
<p>The author would like to acknowledge the Henan Institute of Science and Technology for providing all the necessary supports.</p>
</ack>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s10" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declare that no Generative AI was used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence, and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec id="s11" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Abade</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Ferreira</surname> <given-names>P. A.</given-names>
</name>
<name>
<surname>Vidal</surname> <given-names>F. D.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Plant diseases recognition on images using convolutional neural networks: A systematic review</article-title>. <source>Comput. Electron. Agric.</source> <volume>185</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.COMPAG.2021.106125</pub-id>
</citation></ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Abaineh</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Ejigu</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Atlabachew</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Dejen</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Tilahun</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Risks of pesticides on aquatic ecosystems and pesticide management effectiveness in Ethiopia:review</article-title>. <source>Int. J. Environ. Sci. Technol.</source> <volume>21</volume>, <fpage>8833</fpage>&#x2013;<lpage>8848</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/S13762-024-05631-7</pub-id>
</citation></ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Albahli</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Masood</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Efficient attention-based CNN network (EANet) for multi-class maize crop disease classification</article-title>. <source>Front. Plant Sci.</source> <volume>13</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/FPLS.2022.1003152</pub-id>, PMID: <pub-id pub-id-type="pmid">36311068</pub-id></citation></ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Albattan</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Nawaz</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Javed</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Masood</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Albahli</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>A novel deep learning method for detection and classification of plant diseases</article-title>. <source>Complex Intelligent. Syst.</source> <volume>2022</volume>, <fpage>507</fpage>&#x2013;<lpage>5524</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s40747-021-00536-1</pub-id>
</citation></ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Alsaghir</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Shi</surname> <given-names>X.</given-names>
</name>
<name>
<surname>De</surname> <given-names>S. V.</given-names>
</name>
<name>
<surname>Kondoz</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Understanding dilated mathematical relationship between image features and the convolutional neural network&#x2019;s learnt parameters</article-title>. <source>Entropy</source> <volume>24</volume>, <elocation-id>132</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/E24010132</pub-id>, PMID: <pub-id pub-id-type="pmid">35052158</pub-id></citation></ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Andrew</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Eunice</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Popescu</surname> <given-names>D. E.</given-names>
</name>
<name>
<surname>Chowdary</surname> <given-names>M. K.</given-names>
</name>
<name>
<surname>Hemanth</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Deep learning-based leaf disease detection in crops using images for agricultural applications</article-title>. <source>Agronomy</source> <volume>12</volume>, <elocation-id>2395</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/AGRONOMY12102395</pub-id>
</citation></ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Arun J</surname> <given-names>K.</given-names>
</name>
<name>
<surname>K</surname> <given-names>Dhilip V.</given-names>
</name>
<name>
<surname>Jasinska</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Gono</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Leonowicz</surname> <given-names>Z.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>A Five Convolutional Layer Deep Convolutional Neural Network for Plant Leaf Disease Detection</article-title>. <source>Electronics</source> <volume>11</volume>, <elocation-id>1266</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/ELECTRONICS11081266</pub-id>
</citation></ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Arun</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Fan-Nong</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yi-Bing</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Shen</surname> <given-names>W. C.</given-names>
</name>
<name>
<surname>Sharma</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>UAV T-YOLO-rice: an enhanced tiny yolo networks for rice leaves diseases detection in paddy agronomy</article-title>. <source>IEEE Trans. Netw. Sci. Eng.</source> <volume>11</volume>, <fpage>5201</fpage>&#x2013;<lpage>5216</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TNSE.2024.3350640</pub-id>
</citation></ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bao</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Fan</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Liang</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Detection and identification of tea leaf diseases based on AX-RetinaNet</article-title>. <source>Sci. Rep.</source> <volume>12</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-022-06181-z</pub-id>, PMID: <pub-id pub-id-type="pmid">35140287</pub-id></citation></ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Barbedo</surname> <given-names>J. G. A.</given-names>
</name>
<name>
<surname>Koenigkan</surname> <given-names>L. V.</given-names>
</name>
<name>
<surname>Halfeld-vieira</surname> <given-names>B. A.</given-names>
</name>
<name>
<surname>Costa</surname> <given-names>R. V.</given-names>
</name>
<name>
<surname>Nechet</surname> <given-names>K. L.</given-names>
</name>
<name>
<surname>Godoy</surname> <given-names>C. V.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Annotated plant pathology databases for image-based detection and recognition of diseases</article-title>. <source>IEEE Latin. America Trans.</source> <volume>16</volume>, <fpage>1749</fpage>&#x2013;<lpage>1757</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TLA.2018.8444395</pub-id>
</citation></ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ben</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Henry</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Ramanna</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Plant species recognition with optimized 3D polynomial neural networks and variably overlapping time&#x2013;coherent sliding window</article-title>. <source>Multimedia. Tools Applications.</source> <volume>83</volume>, <fpage>80667</fpage>&#x2013;<lpage>80700</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/S11042-024-18480-W</pub-id>
</citation></ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bhatti</surname> <given-names>U. A.</given-names>
</name>
<name>
<surname>Bazai</surname> <given-names>S. U.</given-names>
</name>
<name>
<surname>Hussain</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Fakhar</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Ku</surname> <given-names>C. S.</given-names>
</name>
<name>
<surname>Marjan</surname> <given-names>S.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Deep learning-based trees disease recognition and classification using hyperspectral data</article-title>. <source>Comput. Mater. Continua.</source> <volume>77</volume>, <fpage>681</fpage>&#x2013;<lpage>697</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.32604/CMC.2023.037958</pub-id>
</citation></ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bijoy</surname> <given-names>M. H.</given-names>
</name>
<name>
<surname>Hasan</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Biswas</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Mazumdar</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Jimenez</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Ahmed</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Towards sustainable agriculture: A novel approach for rice leaf disease detection using dCNN and enhanced dataset</article-title>. <source>IEEE ACCESS.</source> <volume>12</volume>, <fpage>34174</fpage>&#x2013;<lpage>34191</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2024.3371511</pub-id>
</citation></ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bsher</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Garabet</surname> <given-names>A.</given-names>
</name>
<name>
<surname>J&#xf6;rg</surname> <given-names>K.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Comparative performance evaluation of one-stage and two-stage object detectors for screw head detection and classification in disassembly processes</article-title>. <source>Proc. CIRP.</source> <volume>122</volume>, <fpage>527</fpage>&#x2013;<lpage>532</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.procir.2024.01.077</pub-id>
</citation></ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Buchke</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Mayuri</surname> <given-names>V. R</given-names>
</name>
</person-group>. (<year>2024</year>). <article-title>Recognize and classify illnesses on tomato leaves using EfficientNet's Transfer Learning Approach with different size dataset</article-title>. <source>Signal Image Video Process</source>. <volume>18</volume>, <fpage>731</fpage>&#x2013;<lpage>746</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.21203/rs.3.rs-3149045/v1</pub-id>
</citation></ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chen</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Ren</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>YOLOv8-ACCW: lightweight grape leaf disease detection method based on improved YOLOv8</article-title>. <source>IEEE Access.</source> <volume>12</volume>, <fpage>123595</fpage>&#x2013;<lpage>123608</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2024.3453379</pub-id>
</citation></ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chen</surname> <given-names>J. Y.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>J. W.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>H. W.</given-names>
</name>
<name>
<surname>Liang</surname> <given-names>Z. X.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Weakly supervised localization model for plant disease based on Siamese networks</article-title>. <source>Front. Plant Sci.</source> <volume>15</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/FPLS.2024.1418201</pub-id>, PMID: <pub-id pub-id-type="pmid">39399542</pub-id></citation></ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chen</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Classification of camellia oleifera diseases in complex environments by attention and multi-dimensional feature fusion neural network</article-title>. <source>Plants-Basel</source> <volume>12</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/plants12142701</pub-id>, PMID: <pub-id pub-id-type="pmid">37514315</pub-id></citation></ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cheng</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>Z. Q.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Rice diseases identification method based on improved YOLOv7-tiny</article-title>. <source>Agriculture</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/AGRICULTURE14050709</pub-id>
</citation></ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chinnu</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Sellaperumal</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Irene</surname> <given-names>V. P.</given-names>
</name>
<name>
<surname>Ragunath</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Sathyamoorthy</surname> <given-names>N. K.</given-names>
</name>
<name>
<surname>Vaithiyanathan</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Climate change as an existential threat to tropical fruit crop production&#x2014;A review</article-title>. <source>Agriculture</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture14112018</pub-id>
</citation></ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cui</surname> <given-names>Z. I.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>K. L.</given-names>
</name>
<name>
<surname>Kang</surname> <given-names>C. Y.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Tao</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>M. Y.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Plant and disease recognition based on PMF pipeline domain adaptation method: using bark images as meta-dataset</article-title>. <source>Plants</source> <volume>12</volume>, <elocation-id>3280</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/PLANTS12183280</pub-id>, PMID: <pub-id pub-id-type="pmid">37765444</pub-id></citation></ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cui</surname> <given-names>J. P.</given-names>
</name>
<name>
<surname>Tian</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Rice plaque detection and identification based on an improved convolutional neural network</article-title>. <source>Agriculture</source> <volume>13</volume>, <elocation-id>170</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/AGRICULTURE13010170</pub-id>
</citation></ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Dai</surname> <given-names>G. W.</given-names>
</name>
<name>
<surname>Fan</surname> <given-names>J. C.</given-names>
</name>
<name>
<surname>Tian</surname> <given-names>Z. M.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>C. Y.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>PPLC-Net: Neural network-based plant disease identification model supported by weather data augmentation and multi-level attention mechanism</article-title>. <source>J. King. Saud. University-Comput. Inf. Sci.</source> <volume>35</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.JKSUCI.2023.101555</pub-id>
</citation></ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Dai</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>W. J.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>L. X.</given-names>
</name>
<name>
<surname>Dorjoy</surname> <given-names>M. M. H.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>S. W.</given-names>
</name>
<name>
<surname>Hong</surname> <given-names>M.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Pepper leaf disease recognition based on enhanced lightweight convolutional neural networks</article-title>. <source>Front. Plant Sci.</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/FPLS.2023.1230886</pub-id>, PMID: <pub-id pub-id-type="pmid">37621882</pub-id></citation></ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Daniela</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Michael</surname> <given-names>G. S.</given-names>
</name>
<name>
<surname>Jorge</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Kavino</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Michael</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Teshale</surname> <given-names>A.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Advancing common bean (Phaseolus vulgaris L.) disease detection with YOLO driven deep learning to enhance agricultural AI</article-title>. <source>Sci. Rep.</source> <volume>14</volume>, <fpage>15596</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-024-66281-w</pub-id>, PMID: <pub-id pub-id-type="pmid">38971939</pub-id></citation></ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Daniya</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Vigneshwari</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Rider Water Wave-enabled deep learning for disease detection in rice plant</article-title>. <source>Adv. Eng. Software.</source> <volume>182</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.ADVENGSOFT.2023.103472</pub-id>
</citation></ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Devisurya</surname> <given-names>V.</given-names>
</name>
<name>
<surname>Devi</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Anitha</surname> <given-names>N.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Early detection of major diseases in turmeric plant using improved deep learning algorithm</article-title>. <source>Bull. Of Polish. Acad. Of Sciences-Technical. Sci.</source> <volume>70</volume>, <fpage>e140689</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.24425/bpasts.2022.140689</pub-id>
</citation></ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Dey</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Haque</surname> <given-names>U.</given-names>
</name>
<name>
<surname>Khatun</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Ahmed</surname> <given-names>R.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Comparative performance of four CNN-based deep learning variants in detecting Hispa pest, two fungal diseases, and NPK deficiency symptoms of rice (Oryza sativa)</article-title>. <source>Comput. Electron. Agric.</source> <volume>202</volume>, <fpage>1</fpage>&#x2013;<lpage>11</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2022.107340</pub-id>
</citation></ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Di</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Q.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>A method of detecting apple leaf diseases based on improved convolutional neural network</article-title>. <source>PloS One</source> <volume>17</volume>, <fpage>59069</fpage>&#x2013;<lpage>59080</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0262629</pub-id>, PMID: <pub-id pub-id-type="pmid">35104299</pub-id></citation></ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fan</surname> <given-names>X. J.</given-names>
</name>
<name>
<surname>Luo</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Mu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Tjahjadi</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Ren</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Leaf image based plant disease identification using transfer learning and feature fusion</article-title>. <source>Comput. Electron. Agric.</source> <volume>196</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.COMPAG.2022.10689</pub-id>
</citation></ref>
<ref id="B31">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Farian</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Neema</surname> <given-names>N.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Ensemble model for grape leaf disease detection using CNN feature extractors and random forest classifier</article-title>. <source>Heliyon</source> <volume>10</volume>, <elocation-id>e33377</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.HELIYON.2024.E33377</pub-id>, PMID: <pub-id pub-id-type="pmid">39027444</pub-id></citation></ref>
<ref id="B32">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gaashani</surname> <given-names>M. S. A. M.</given-names>
</name>
<name>
<surname>Alkanhel</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Ali</surname> <given-names>M. A. S.</given-names>
</name>
<name>
<surname>Muthanna</surname> <given-names>M. S. A.</given-names>
</name>
<name>
<surname>Aziz</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Muthanna</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>MSCPNet: A multi-scale convolutional pooling network for maize disease classification</article-title>. <source>IEEE Access.</source> <volume>13</volume>, <fpage>11423</fpage>&#x2013;<lpage>11446</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2024.3524729</pub-id>
</citation></ref>
<ref id="B33">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ganaie</surname> <given-names>M. A.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>M. H.</given-names>
</name>
<name>
<surname>Malik</surname> <given-names>A. K.</given-names>
</name>
<name>
<surname>Tanveer</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Suganthan</surname> <given-names>P. N.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Ensemble deep learning: A review</article-title>. <source>Eng. Appl. Artif. Intell.</source> <volume>115</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.ENGAPPAI.2022.105151</pub-id>
</citation></ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gong</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>A high-precision detection method of apple leaf diseases using improved faster R-CNN</article-title>. <source>Agriculture-Basel</source> <volume>13</volume>, <elocation-id>240</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture13020240</pub-id>
</citation></ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Guo</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Grape leaf disease detection based on attention mechanisms</article-title>. <source>Comput. Electron. Agricult.</source> <volume>15</volume>, <fpage>205</fpage>&#x2013;<lpage>212</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.25165/j.ijabe.20221505.7548</pub-id>
</citation></ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Haleem</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Jamil</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Bilal</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Yasir</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Muhammad</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Atta</surname> <given-names>U.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>EfficientNet-based robust recognition of peach plant diseases in field images</article-title>. <source>Comput. Materials &amp; Continua.</source> <volume>71</volume>, <fpage>2073</fpage>&#x2013;<lpage>2089</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.32604/CMC.2022.018961</pub-id>
</citation></ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Haruna</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Qin</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Mbyamm</surname> <given-names>K. M. J.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>An improved approach to detection of rice leaf disease with GAN-based data augmentation pipeline</article-title>. <source>Appl. Sci.</source> <volume>13</volume>, <elocation-id>1346</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/APP13031346</pub-id>
</citation></ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hatice</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Veysel</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Potato leaf disease detection with a novel deep learning model based on depthwise separable convolution and transformer networks</article-title>. <source>Eng. Appl. Artif. Intell.</source> <volume>133</volume>, <elocation-id>108307</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.ENGAPPAI.2024.108307</pub-id>
</citation></ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>He</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Weng</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Combining multicolor fluorescence imaging with multispectral reflectance imaging for rapid citrus Huanglongbing detection based on lightweight convolutional neural network using a handheld device</article-title>. <source>Comput. Electron. Agric.</source> <volume>194</volume>, <fpage>1</fpage>&#x2013;<lpage>10</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2022.106808</pub-id>
</citation></ref>
<ref id="B40">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>He</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Ren</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Fu</surname> <given-names>W.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>YOLOv11-RCDWD: A new efficient model for detecting maize leaf diseases based on the improved YOLOv11</article-title>. <source>Appl. Sciences-Basel.</source> <volume>15</volume>(<issue>8</issue>). doi:&#xa0;<pub-id pub-id-type="doi">10.3390/app15084535</pub-id>
</citation></ref>
<ref id="B41">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>He</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Tong</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>LT-YOLO: A lightweight network for detecting tomato leaf diseases</article-title>. <source>Cmc-Computers. Mater. Continua.</source> <volume>82</volume>, <fpage>4301</fpage>&#x2013;<lpage>4317</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.32604/cmc.2025.060550</pub-id>
</citation></ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>He</surname> <given-names>Y. J.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>N. Y.</given-names>
</name>
<name>
<surname>Ge</surname> <given-names>X. J.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>S. Q.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>L. F.</given-names>
</name>
<name>
<surname>Kong</surname> <given-names>M. H.</given-names>
</name>
<etal/>
</person-group>. (<year>2025</year>). <article-title>Passion fruit disease detection using sparse parallel attention mechanism and optical sensing</article-title>. <source>Agriculture-Basel</source> <volume>15</volume> (<issue>8</issue>). doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture15070733</pub-id>
</citation></ref>
<ref id="B43">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hicham</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Jamal</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Abdelilah</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Advancing disease identification in fava bean crops: A novel deep learning solution integrating YOLO-NAS for precise rust</article-title>. <source>J. Intell. Fuzzy Syst.</source> <volume>46</volume>, <fpage>3475</fpage>&#x2013;<lpage>3489</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3233/JIFS-236154</pub-id>
</citation></ref>
<ref id="B44">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hu</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>W. Q.</given-names>
</name>
<name>
<surname>Zeng</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Cheng</surname> <given-names>C.</given-names>
</name>
<name>
<surname>He</surname> <given-names>L. C.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>FOTCA: hybrid transformer-CNN architecture using AFNO for accurate plant leaf disease image recognition</article-title>. <source>Front. Plant Sci.</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/FPLS.2023.1231903</pub-id>, PMID: <pub-id pub-id-type="pmid">37771483</pub-id></citation></ref>
<ref id="B45">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Du</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Jiao</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Class-attention-based lesion proposal convolutional neural network for strawberry diseases identification</article-title>. <source>Front. Plant Sci.</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2023.1091600</pub-id>, PMID: <pub-id pub-id-type="pmid">36844049</pub-id></citation></ref>
<ref id="B46">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ilyas</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Jin</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Siddique</surname> <given-names>M. I.</given-names>
</name>
<name>
<surname>Lee</surname> <given-names>S. J.</given-names>
</name>
<name>
<surname>Kim</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Chua</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>DIANA: A deep learning-based paprika plant disease and pest phenotyping system with disease severity analysis</article-title>. <source>Front. Plant Sci.</source> <volume>13</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/FPLS.2022.983625</pub-id>, PMID: <pub-id pub-id-type="pmid">36275542</pub-id></citation></ref>
<ref id="B47">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Islam</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Azad</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Arman</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Alyami</surname> <given-names>S. A.</given-names>
</name>
<name>
<surname>Hasan</surname> <given-names>M. M.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>PlantCareNet: an advanced system to recognize plant diseases with dual-mode recommendations for prevention</article-title>. <source>Plant Methods</source> <volume>21</volume>, <fpage>52</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13007-025-01366-9</pub-id>, PMID: <pub-id pub-id-type="pmid">40264213</pub-id></citation></ref>
<ref id="B48">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kalimuthu</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Arivalagan</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Mohan</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Christyraj</surname> <given-names>J. R. S. S.</given-names>
</name>
<name>
<surname>Arockiaraj</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Muthusamy</surname> <given-names>R.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Point of care diagnosis of plant virus: Current trends and prospects</article-title>. <source>Mol. Cell. Probes.</source> <volume>61</volume>, <elocation-id>101779</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.MCP.2021.101779</pub-id>, PMID: <pub-id pub-id-type="pmid">34798294</pub-id></citation></ref>
<ref id="B49">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kanda</surname> <given-names>P. S.</given-names>
</name>
<name>
<surname>Xia</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Kyslytysna</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Owoola</surname> <given-names>E. O.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Tomato leaf disease recognition on leaf images based on fine-tuned residual neural networks</article-title>. <source>Plants</source> <volume>11</volume>, <elocation-id>2935</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/PLANTS11212935</pub-id>, PMID: <pub-id pub-id-type="pmid">36365386</pub-id></citation></ref>
<ref id="B50">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>W. H.</given-names>
</name>
<name>
<surname>Xia</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>W. B.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>A study on maize leaf pest and disease detection model based on attention and multi-scale features</article-title>. <source>Appl. Sci.</source> <volume>13</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/APP131810441</pub-id>
</citation></ref>
<ref id="B51">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kasana</surname> <given-names>S. S.</given-names>
</name>
<name>
<surname>Rathore</surname> <given-names>A. S.</given-names>
</name>
</person-group>. (<year>2024</year>). <article-title>Attention-Based Light Weight Deep Learning Models for Early Potato Disease Detection</article-title>. <source>Appl. Sci.</source> <volume>14</volume>, <elocation-id>8038</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/APP14178038</pub-id>
</citation></ref>
<ref id="B52">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Khasawneh</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Faouri</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Fraiwan</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Automatic detection of tomato diseases using deep transfer learning</article-title>. <source>Appl. Sci.</source> <volume>12</volume>, <elocation-id>8467</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/APP12178467</pub-id>
</citation></ref>
<ref id="B53">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Krizhevsky</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Sutskever</surname> <given-names>I.</given-names>
</name>
<name>
<surname>Hinton</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>ImageNet classification with deep convolutional neural networks</article-title>. <source>Adv. Neural Inf. Process. Syst.</source> <volume>25</volume>, <fpage>1097</fpage>&#x2013;<lpage>1105</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1145/3065386</pub-id>
</citation></ref>
<ref id="B54">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kumar</surname> <given-names>B. M.</given-names>
</name>
<name>
<surname>Bidarakundi</surname> <given-names>P. M.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>Coffee-net: deep mobile patch generation network for coffee leaf disease classification</article-title>. <source>IEEE Sensors. J.</source> <volume>25</volume>, <fpage>7355</fpage>&#x2013;<lpage>7362</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/JSEN.2024.3498050</pub-id>
</citation></ref>
<ref id="B55">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>PM-YOLO: A powdery mildew automatic grading detection model for rubber tree</article-title>. <source>Insects</source> <volume>15</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/insects15120937</pub-id>, PMID: <pub-id pub-id-type="pmid">39769539</pub-id></citation></ref>
<ref id="B56">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Cheng</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Cui</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>C.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>High-performance plant pest and disease detection based on model ensemble with inception module and cluster algorithm</article-title>. <source>Plants</source> <volume>12</volume>, <elocation-id>200</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/PLANTS12010200</pub-id>, PMID: <pub-id pub-id-type="pmid">36616330</pub-id></citation></ref>
<ref id="B57">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Ye</surname> <given-names>Q.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>One-stage disease detection method for maize leaf based on multi-scale feature fusion</article-title>. <source>Appl. Sci.</source> <volume>12</volume>, <elocation-id>7960</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/APP12167960</pub-id>
</citation></ref>
<ref id="B58">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Identification of tea disease under complex backgrounds based on minimalism neural network architecture and channel reconstruction unit</article-title>. <source>IEEE Access.</source> <volume>12</volume>, <fpage>35934</fpage>&#x2013;<lpage>35946</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2024.3373707</pub-id>
</citation></ref>
<ref id="B59">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Peng</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>VLDNet: an ultra-lightweight crop disease identification network</article-title>. <source>Agriculture</source> <volume>13</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture13081482</pub-id>
</citation></ref>
<ref id="B60">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>W. P.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>A ResNet50-DPA model for tomato leaf disease identification</article-title>. <source>Front. Plant Sci.</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/FPLS.2023.1258658</pub-id>, PMID: <pub-id pub-id-type="pmid">37908831</pub-id></citation></ref>
<ref id="B61">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lin</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Bai</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Lin</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>TSBA-YOLO: an improved tea diseases detection model based on attention mechanisms and feature fusion</article-title>. <source>Forests</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/f14030619</pub-id>
</citation></ref>
<ref id="B62">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Anguelov</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Erhan</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Szegedy</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Reed</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Fu</surname> <given-names>C. Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2016</year>). <article-title>SSD: Single shot multibox detector</article-title>. <source>Comput. Vision &#x2013; ECCV.</source> <volume>2016</volume>, <fpage>21</fpage>&#x2013;<lpage>37</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-3-319-46448-0_2</pub-id>
</citation></ref>
<ref id="B63">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Bai</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Zheng</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>Q.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>An apple leaf disease identification model for safeguarding apple food safety</article-title>. <source>Food Sci. Technol.</source> <volume>43</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1590/fst.104322</pub-id>
</citation></ref>
<ref id="B64">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>Data-fusion based on transfer learning for plant disease recognition</article-title>. <source>Inf. Technol. And Control.</source> <volume>54</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.5755/j01.itc.54.1.39520</pub-id>
</citation></ref>
<ref id="B65">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Gao</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Z.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Crop disease recognition based on modified light-weight CNN with attention mechanism</article-title>. <source>IEEE Access</source> <volume>10</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2022.3216285</pub-id>
</citation></ref>
<ref id="B66">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Liang</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>YOLO-BSMamba: A YOLOv8s-based model for tomato leaf disease detection in complex backgrounds</article-title>. <source>Agronomy-Basel</source> <volume>15</volume>, <elocation-id>870</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy15040870</pub-id>
</citation></ref>
<ref id="B67">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Lv</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Deng</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Research on maize disease identification methods in complex environments based on cascade networks and two-stage transfer learning</article-title>. <source>Sci. Rep.</source> <volume>12</volume>, <fpage>18914</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/S41598-022-23484-3</pub-id>, PMID: <pub-id pub-id-type="pmid">36344603</pub-id></citation></ref>
<ref id="B68">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Min</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Mei</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Plant disease recognition: A large-scale benchmark dataset and a visual region and loss reweighting approach</article-title>. <source>IEEE Trans. Image Process.</source> <volume>30</volume>, <fpage>2003</fpage>&#x2013;<lpage>2015</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TIP.2021.3049334</pub-id>, PMID: <pub-id pub-id-type="pmid">33444137</pub-id></citation></ref>
<ref id="B69">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>Y. X.</given-names>
</name>
<name>
<surname>Su</surname> <given-names>J. Y.</given-names>
</name>
<name>
<surname>Zheng</surname> <given-names>Z. Z.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>D. Z.</given-names>
</name>
<name>
<surname>Song</surname> <given-names>Y. Y.</given-names>
</name>
<name>
<surname>Fang</surname> <given-names>Y. L.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>GLDCNet: A novel convolutional neural network for grapevine leafroll disease recognition using UAV-based imagery</article-title>. <source>Comput. Electron. Agricult.</source> <volume>218</volume>, <elocation-id>108668</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.COMPAG.2024.108668</pub-id>
</citation></ref>
<ref id="B70">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Multisource information fusion method for vegetable disease detection</article-title>. <source>BMC Plant Biol.</source> <volume>24</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s12870-024-05346-4</pub-id>, PMID: <pub-id pub-id-type="pmid">39095689</pub-id></citation></ref>
<ref id="B71">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Zhai</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Xia</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Tomato leaf disease identification method based on improved YOLOX</article-title>. <source>Agronomy</source> <volume>13</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/AGRONOMY13061455</pub-id>
</citation></ref>
<ref id="B72">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>K. C.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>X. J.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>PiTLiD: identification of plant disease from leaf images based on convolutional neural network</article-title>. <source>IEEE/ACM Trans. Comput. Biol. Bioinf.</source> <volume>20</volume>, <fpage>1278</fpage>&#x2013;<lpage>1288</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TCBB.2022.3195291</pub-id>, PMID: <pub-id pub-id-type="pmid">35914052</pub-id></citation></ref>
<ref id="B73">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lobna</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Mahmoud</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Hanaa</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Hassanien</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>An optimized capsule neural networks for tomato leaf disease classification</article-title>. <source>EURASIP J. Image Video Process.</source> <volume>2024</volume>, <elocation-id>2</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/S13640-023-00618-9</pub-id>
</citation></ref>
<ref id="B74">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Long</surname> <given-names>C. F.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Y. J.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>H. M.</given-names>
</name>
<name>
<surname>Su</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Deng</surname> <given-names>Y. J.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>An approach for detecting tomato under a complicated environment</article-title>. <source>Agronomy-Basel</source> <volume>15</volume> (<issue>3</issue>). doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy15030667</pub-id>
</citation></ref>
<ref id="B75">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>M. Y.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>T. H.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Lightweight corn seed disease identification method based on improved shuffleNetV2</article-title>. <source>Agriculture</source> <volume>12</volume>, <elocation-id>1929</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/AGRICULTURE12111929</pub-id>
</citation></ref>
<ref id="B76">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Luo</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Xue</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Deng</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Mo</surname> <given-names>Z.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Citrus diseases and pests detection model based on self-attention YOLOV8</article-title>. <source>IEEE Access.</source> <volume>11</volume>, <fpage>139872</fpage>&#x2013;<lpage>139881</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2023.3340148</pub-id>
</citation></ref>
<ref id="B77">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ma</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Meng</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Z. Y.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>G. F.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Multi-plant disease identification based on lightweight resNet18 model</article-title>. <source>Agronomy-Basel</source> <volume>13</volume>, <elocation-id>2702</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy13112702</pub-id>
</citation></ref>
<ref id="B78">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Marriam</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Nazir</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Javed</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Amin</surname> <given-names>S. T.</given-names>
</name>
<name>
<surname>Jeribi</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Tahir</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>CoffeeNet: A deep learning approach for coffee plant leaves diseases recognition</article-title>. <source>Expert Syst. Appl.</source> <volume>237</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.eswa.2023.121481</pub-id>
</citation></ref>
<ref id="B79">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Masood</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Nawaz</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Nazir</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Javed</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Alkanhel</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Elmannai</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>MaizeNet: A deep learning approach for effective recognition of maize plant leaf diseases</article-title>. <source>IEEE Access.</source> <volume>11</volume>, <fpage>52862</fpage>&#x2013;<lpage>52876</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2023.3280260</pub-id>
</citation></ref>
<ref id="B80">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Matteo</surname> <given-names>C.</given-names>
</name>
<name>
<surname>J. Brendan</surname> <given-names>R.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Foundations of deep learning. An introduction to the special issue</article-title>. <source>Cogn. Syst. Res.</source> <volume>87</volume>, <fpage>101246</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.COGSYS.2024.101246</pub-id>
</citation></ref>
<ref id="B81">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Md</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Shakrin</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Osim</surname> <given-names>K.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>PlantDet: A Robust Multi-Model Ensemble Method Based on Deep Learning For Plant Disease Detection</article-title>. <source>IEEE Access</source> <volume>11</volume>, <fpage>34846</fpage>&#x2013;<lpage>34859</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2023.3264835</pub-id>
</citation></ref>
<ref id="B82">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Meng</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhan</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Yan</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>A rapid and precise algorithm for maize leaf disease detection based on YOLO MSM</article-title>. <source>Sci. Rep.</source> <volume>15</volume>, <fpage>6016</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-025-88399-1</pub-id>, PMID: <pub-id pub-id-type="pmid">39971956</pub-id></citation></ref>
<ref id="B83">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mingle</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Ji</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Jaehwan</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Jucheng</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Sook</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Plant disease recognition datasets in the age of deep learning: challenges and opportunities</article-title>. <source>Front. Plant Sci.</source> <volume>15</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/FPLS.2024.1452551</pub-id>, PMID: <pub-id pub-id-type="pmid">39399537</pub-id></citation></ref>
<ref id="B84">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Nag</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Chanda</surname> <given-names>P. R.</given-names>
</name>
<name>
<surname>Nandi</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Mobile app-based tomato disease identification with fine-tuned convolutional neural networks</article-title>. <source>Comput. Electrical. Eng.</source> <volume>112</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.COMPELECENG.2023.108995</pub-id>
</citation></ref>
<ref id="B85">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Nawaz</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Nazir</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Javed</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Masood</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Rashid</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Kim</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>A robust deep learning approach for tomato plant leaf disease localization and classification</article-title>. <source>Sci. Rep.</source> <volume>12</volume>, <fpage>18568</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/S41598-022-21498-5</pub-id>, PMID: <pub-id pub-id-type="pmid">36329073</pub-id></citation></ref>
<ref id="B86">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Obsie</surname> <given-names>E. Y.</given-names>
</name>
<name>
<surname>Qu</surname> <given-names>H. C.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y. J.</given-names>
</name>
<name>
<surname>Annis</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Drummond</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Yolov5s-CA: an improved yolov5 based on the attention mechanism for mummy berry disease detection</article-title>. <source>Agriculture</source> <volume>13</volume>, <elocation-id>78</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/AGRICULTURE13010078</pub-id>
</citation></ref>
<ref id="B87">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>&#xd6;nler</surname> <given-names>E.</given-names>
</name>
<name>
<surname>K&#xf6;yc&#xfc;</surname> <given-names>N. D.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Wheat powdery mildew detection with YOLOv8 object detection model</article-title>. <source>Appl. Sciences-Basel.</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/app14167073</pub-id>
</citation></ref>
<ref id="B88">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pan</surname> <given-names>N. Y.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>W. M.</given-names>
</name>
<name>
<surname>Luo</surname> <given-names>Y. T.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Y. L.</given-names>
</name>
</person-group> (<year>2024</year>a). <article-title>Identification of leaf disease based on memristor convolutional neural networks</article-title>. <source>IEEE Access</source> <volume>12</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2022.3216285</pub-id>
</citation></ref>
<ref id="B89">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pan</surname> <given-names>N. Y.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>W. M.</given-names>
</name>
<name>
<surname>Luo</surname> <given-names>Y. T.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Y. L.</given-names>
</name>
</person-group> (<year>2024</year>b). <article-title>Identification of leaf disease based on memristor convolutional neural networks</article-title>. <source>IEEE Access.</source> <volume>12</volume>, <fpage>115197</fpage>&#x2013;<lpage>115203</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2024.3444796</pub-id>
</citation></ref>
<ref id="B90">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pandian</surname> <given-names>J. A.</given-names>
</name>
<name>
<surname>Kanchanadevi</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Kumar</surname> <given-names>V. D.</given-names>
</name>
<name>
<surname>Jasi&#x144;ska</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Go&#x148;o</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Leonowicz</surname> <given-names>Z.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>A five convolutional layer deep convolutional neural network for plant leaf disease detection</article-title>. <source>Electronics</source> <volume>11</volume>, <elocation-id>1266</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/ELECTRONICS11081266</pub-id>
</citation></ref>
<ref id="B91">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Prabhjot</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Shilpi</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Rajeev</surname> <given-names>T.</given-names>
</name>
<etal/>
</person-group> (<year>2022</year>). <article-title>Recognition of Leaf Disease Using Hybrid Convolutional Neural Network by Applying Feature Reduction</article-title>. <source>Sensors</source> <volume>22</volume>, <elocation-id>575</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/S22020575</pub-id>, PMID: <pub-id pub-id-type="pmid">35062534</pub-id></citation></ref>
<ref id="B92">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Qiu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Lu</surname> <given-names>X. L.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X. X.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Y. Q.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Research on image recognition of tomato leaf diseases based on improved AlexNet model</article-title>. <source>Heliyon</source> <volume>10</volume>, <fpage>e33555</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.HELIYON.2024.E33555</pub-id>, PMID: <pub-id pub-id-type="pmid">39044970</pub-id></citation></ref>
<ref id="B93">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rashid</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Aslam</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Aziz</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Aldehim</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>A modified mobileNetv3 coupled with inverted residual and channel attention mechanisms for detection of tomato leaf diseases</article-title>. <source>IEEE ACCESS.</source> <volume>13</volume>, <fpage>52683</fpage>&#x2013;<lpage>52696</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2025.3550205</pub-id>
</citation></ref>
<ref id="B94">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Raza</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Pitafi</surname> <given-names>A. H.</given-names>
</name>
<name>
<surname>Shaikh</surname> <given-names>M. K.</given-names>
</name>
<name>
<surname>Ahmed</surname> <given-names>K.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>Optimizing potato leaf disease recognition: Insights DENSE-NET-121 and Gaussian elimination filter fusion</article-title>. <source>Heliyon</source> <volume>11</volume>, <elocation-id>e42318</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.heliyon.2025.e42318</pub-id>, PMID: <pub-id pub-id-type="pmid">39991243</pub-id></citation></ref>
<ref id="B95">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Roeswitawati</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Sutarman</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Indratmi</surname> <given-names>D.</given-names>
</name>
</person-group> (<year>2024</year>). &#x201c;<article-title>Imperata cylindrica root as PGPR pressing the pathogen Phytophthora infestans, the cause of wilt disease in potato plant</article-title>,&#x201d; in <source>BIO Web of Conferences</source>, vol. <volume>143</volume>. (<publisher-loc>Paris, France</publisher-loc>: <publisher-name>EDP Sciences</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1051/BIOCONF/202414301022</pub-id>
</citation></ref>
<ref id="B96">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rose</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Rui</surname> <given-names>W.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Learning dynamical systems from data: An introduction to physics-guided deep learning</article-title>. <source>Proc. Natl. Acad. Sci. U.S.A.</source> <volume>121</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1073/PNAS.2311808121</pub-id>, PMID: <pub-id pub-id-type="pmid">38913886</pub-id></citation></ref>
<ref id="B97">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sabbir</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Md</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Tasnim</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Sony</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Kabir</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Less is more: lighter and faster deep neural architecture for tomato leaf disease classification</article-title>. <source>IEEE Access</source> <volume>10</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2022.3187203</pub-id>
</citation></ref>
<ref id="B98">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Saidani</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Ghodhbani</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Alhomoud</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Ben</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Embedded plant disease recognition using deep PlantNet on FPGA-SoC</article-title>. <source>Comput. Inform.</source> <volume>42</volume>, <fpage>1378</fpage>&#x2013;<lpage>1403</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.31577/cai_2023_6_1378</pub-id>
</citation></ref>
<ref id="B99">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Samia</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Reem</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Mohammed</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Aziz</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Muthanna</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>MULTINET: A multi-agent DRL and EfficientNet assisted framework for 3D plant leaf disease identification and severity quantification</article-title>. <source>IEEE Access</source> <volume>11</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2023.3303868</pub-id>
</citation></ref>
<ref id="B100">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shah</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Kashif</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Sara</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Areej</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Mehrez</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Muhammad</surname> <given-names>A. K.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Deep transfer learning based detection and classification of citrus plant diseases</article-title>. <source>Comput. Mater. Continua</source> <volume>76</volume>, <fpage>895</fpage>&#x2013;<lpage>914</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.32604/CMC.2023.039781</pub-id>
</citation></ref>
<ref id="B101">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Lu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>D.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Cotton disease recognition method in natural environment based on convolutional neural network</article-title>. <source>Agriculture-Basel</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture14091577</pub-id>
</citation></ref>
<ref id="B102">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shwetha</surname> <given-names>V.</given-names>
</name>
<name>
<surname>Bhagwat</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Laxmi</surname> <given-names>V.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>LeafSpotNet: A deep learning framework for detecting leaf spot disease in jasmine plants</article-title>. <source>Artif. Intell. Agricult.</source> <volume>12</volume>, <fpage>1</fpage>&#x2013;<lpage>18</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.AIIA.2024.02.002</pub-id>
</citation></ref>
<ref id="B103">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Singh</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Jain</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Jain</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Kayal</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Kumawat</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Batra</surname> <given-names>N.</given-names>
</name>
</person-group> (<year>2020</year>). &#x201c;<article-title>PlantDoc: A dataset for visual plant disease detection</article-title>,&#x201d; in <source>Proceedings of the 7th ACM IKDD CoDS and 25th COMAD (CoDS-COMAD 2020)</source>. (<publisher-loc>New York, United States</publisher-loc>: <publisher-name>ACM</publisher-name>), <fpage>249</fpage>&#x2013;<lpage>253</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1145/3371158.3371196</pub-id>
</citation></ref>
<ref id="B104">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sk</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Arnab</surname> <given-names>K.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Plant disease identification using a novel convolutional neural network</article-title>. <source>IEEE Access</source> <volume>10</volume>, <fpage>5390</fpage>&#x2013;<lpage>5401</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2022.3141371</pub-id>
</citation></ref>
<ref id="B105">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Soeb</surname> <given-names>M. J. A.</given-names>
</name>
<name>
<surname>Jubayer</surname> <given-names>M. F.</given-names>
</name>
<name>
<surname>Tarin</surname> <given-names>T. A.</given-names>
</name>
<name>
<surname>Al Mamun</surname> <given-names>M. R.</given-names>
</name>
<name>
<surname>Ruhad</surname> <given-names>F. M.</given-names>
</name>
<name>
<surname>Parven</surname> <given-names>A.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Tea leaf disease detection and identification based on YOLOv7 (YOLO-T)</article-title>. <source>Sci. Rep.</source> <volume>13</volume>, <fpage>6078</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/S41598-023-33270-4</pub-id>, PMID: <pub-id pub-id-type="pmid">37055480</pub-id></citation></ref>
<ref id="B106">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Strandberg</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Andersson</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Berlin</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Plant pathogen infection risk and climate change in the Nordic and Baltic countries</article-title>. <source>Environ. Res. Commun.</source> <volume>6</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1088/2515-7620/ad352a</pub-id>
</citation></ref>
<ref id="B107">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Z.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Highly accurate and lightweight detection model of apple leaf diseases based on YOLO</article-title>. <source>Agronomy-Basel</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14061331</pub-id>
</citation></ref>
<ref id="B108">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Gu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Liang</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Veg-denseCap: dense captioning model for vegetable leaf disease images</article-title>. <source>Agronomy-Basel</source> <volume>13</volume>, <fpage>1700</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy13071700</pub-id>
</citation></ref>
<ref id="B109">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>W. B.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>Z. L.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Ru</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>R. B.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>R.</given-names>
</name>
<etal/>
</person-group>. (<year>2025</year>). <article-title>Ultra-lightweight tomatoes disease recognition method based on efficient attention mechanism in complex environment</article-title>. <source>Front. Plant Sci.</source> <volume>15</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2024.1491593</pub-id>, PMID: <pub-id pub-id-type="pmid">40017620</pub-id></citation></ref>
<ref id="B110">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tang</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Porle</surname> <given-names>R. R.</given-names>
</name>
<name>
<surname>Yew</surname> <given-names>H. T.</given-names>
</name>
<name>
<surname>Wong</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>Identification of maize diseases based on dynamic convolution and tri-attention mechanism</article-title>. <source>IEEE Access.</source> <volume>13</volume>, <fpage>6834</fpage>&#x2013;<lpage>6844</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2025.3525661</pub-id>
</citation></ref>
<ref id="B111">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Thai</surname> <given-names>H. T.</given-names>
</name>
<name>
<surname>Le</surname> <given-names>K. H.</given-names>
</name>
<name>
<surname>Nguyen</surname> <given-names>N. L. T.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>EF-CenterNet: An efficient anchor-free model for UAV-based banana leaf disease detection</article-title>. <source>Comput. Electron. Agric.</source> <volume>231</volume>, <elocation-id>109927</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2025.109927</pub-id>
</citation></ref>
<ref id="B112">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tian</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Duan</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Yuan</surname> <given-names>A.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>VMF-SSD: A novel V-space based multi-scale feature fusion SSD for apple leaf disease detection</article-title>. <source>IEEE/ACM Trans. Comput. Biol. Bioinf.</source> <volume>20</volume>, <fpage>2016</fpage>&#x2013;<lpage>2028</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TCBB.2022.3229114</pub-id>, PMID: <pub-id pub-id-type="pmid">37015544</pub-id></citation></ref>
<ref id="B113">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Vasudevan</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Karthick</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>A lightweight depth-wise separable convolution-based CapsNet for efficient grape leaf disease detection</article-title>. <source>Traitement du Signal</source> <volume>40</volume>, <fpage>2869</fpage>&#x2013;<lpage>2877</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.18280/ts.400648</pub-id>
</citation></ref>
<ref id="B114">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Fu</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Fan</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Xia</surname> <given-names>C.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Sweet potato leaf detection in a natural scene based on faster R-CNN with a visual attention mechanism and DIoU-NMS</article-title>. <source>Ecol. Inf.</source> <volume>73</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.ECOINF.2022.101931</pub-id>
</citation></ref>
<ref id="B115">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>J. Y.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>M. M.</given-names>
</name>
<name>
<surname>Han</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>X. D.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>YOLOv8-RCAA: A lightweight and high-performance network for tea leaf disease detection</article-title>. <source>Agriculture-Basel</source> <volume>14</volume>, <elocation-id>1240</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture14081240</pub-id>
</citation></ref>
<ref id="B116">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>H. M.</given-names>
</name>
<name>
<surname>Pan</surname> <given-names>X. Y.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>Y. Y.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>S. Q.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>R. B.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Maize leaf disease recognition based on TC-MRSN model in sustainable agriculture</article-title>. <source>Comput. Electron. Agric.</source> <volume>221</volume>, <elocation-id>108915</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.COMPAG.2024.108915</pub-id>
</citation></ref>
<ref id="B117">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>F. Y.</given-names>
</name>
<name>
<surname>Rao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Luo</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Jin</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>Z. H.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>W.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Practical cucumber leaf disease recognition using improved Swin Transformer and small sample size</article-title>. <source>Comput. Electron. Agric.</source> <volume>199</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.COMPAG.2022.107163</pub-id>
</citation></ref>
<ref id="B118">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>B. B.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>C. X.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Y. Y.</given-names>
</name>
<name>
<surname>Cao</surname> <given-names>C. X.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Gong</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>An ultra-lightweight efficient network for image-based plant disease and pest infection detection</article-title>. <source>Precis. Agricult.</source> <volume>24</volume>, <fpage>1836</fpage>&#x2013;<lpage>1861</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11119-023-10020-0</pub-id>
</citation></ref>
<ref id="B119">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Extraction of pine wilt disease regions using UAV RGB imagery and improved mask R-CNN models fused with convNeXt</article-title>. <source>Forests</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/f14081672</pub-id>
</citation></ref>
<ref id="B120">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wu</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>J. L.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>M. F.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Ding</surname> <given-names>S. N.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>K. W.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>Tea leaf disease recognition using attention convolutional neural network and handcrafted features</article-title>. <source>Crop Protect.</source> <volume>190</volume>, <elocation-id>107118</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.CROPRO.2025.107118</pub-id>
</citation></ref>
<ref id="B121">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xu</surname> <given-names>L. X.</given-names>
</name>
<name>
<surname>Cao</surname> <given-names>B. X.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>F. J.</given-names>
</name>
<name>
<surname>Ning</surname> <given-names>S. Y.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>W. B.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Wheat leaf disease identification based on deep learning algorithms</article-title>. <source>Physiol. Mol. Plant Pathol.</source> <volume>123</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.PMPP.2022.101940</pub-id>
</citation></ref>
<ref id="B122">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yadav</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Tewari</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>CONF-RCNN: a conformer and faster region-based convolutional neural network model for multi-label classification of tomato leaves disease in real field environment</article-title>. <source>J. Plant Dis. Prot.</source> <volume>132</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s41348-024-01057-y</pub-id>
</citation></ref>
<ref id="B123">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ye</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Shao</surname> <given-names>G. Q.</given-names>
</name>
<name>
<surname>He</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Gao</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2024</year>a). <article-title>YOLOv8-RMDA: lightweight YOLOv8 network for early detection of small target diseases in tea</article-title>. <source>Sensors</source> <volume>24</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s24092896</pub-id>, PMID: <pub-id pub-id-type="pmid">38733002</pub-id></citation></ref>
<ref id="B124">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ye</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Shao</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Gao</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2024</year>b). <article-title>Detection model of tea disease severity under low light intensity based on YOLOv8 and enlightenGAN</article-title>. <source>Plants-Basel</source> <volume>13</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/plants13101377</pub-id>, PMID: <pub-id pub-id-type="pmid">38794447</pub-id></citation></ref>
<ref id="B125">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yi</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Shi</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>HHS-RT-DETR: A method for the detection of citrus greening disease</article-title>. <source>Agronomy</source> <volume>14</volume>, <fpage>2900</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy14122900</pub-id>
</citation></ref>
<ref id="B126">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yin</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>W. H.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Yi</surname> <given-names>L. L.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Recognition of grape leaf diseases using MobileNetV3 and deep transfer learning</article-title>. <source>Int. J. Agric. Biol. Eng.</source> <volume>15</volume>, <fpage>184</fpage>&#x2013;<lpage>194</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.25165/j.ijabe.20221503.7062</pub-id>
</citation></ref>
<ref id="B127">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yin</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Geng</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Xi</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zeng</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Si</surname> <given-names>C.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>A high-precision jujube disease spot detection based on SSD during the sorting process</article-title>. <source>PloS One</source> <volume>19</volume>, <fpage>e0296314</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/JOURNAL.PONE.0296314</pub-id>, PMID: <pub-id pub-id-type="pmid">38180957</pub-id></citation></ref>
<ref id="B128">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yu</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Ma</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Guan</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>A recognition method of soybean leaf diseases based on an improved deep learning model</article-title>. <source>Front. Plant Sci.</source> <volume>13</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/FPLS.2022.878834</pub-id>, PMID: <pub-id pub-id-type="pmid">35712600</pub-id></citation></ref>
<ref id="B129">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yuan</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Zou</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Luo</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Zheng</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>L.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>A lightweight pine wilt disease detection method based on vision transformer-enhanced YOLO</article-title>. <source>Forests</source> <volume>15</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/f15061050</pub-id>
</citation></ref>
<ref id="B130">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>D. Y.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Ma</surname> <given-names>M. Y.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Detecting tomato disease types and degrees using multi-branch and destruction learning</article-title>. <source>Comput. Electron. Agric.</source> <volume>213</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.COMPAG.2023.108244</pub-id>
</citation></ref>
<ref id="B131">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>T. H.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>W. Z.</given-names>
</name>
<name>
<surname>Yuan</surname> <given-names>C. C.</given-names>
</name>
<name>
<surname>Seng</surname> <given-names>X. Y.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>T. T.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>YOLO-CRD: A lightweight model for the detection of rice diseases in natural environments</article-title>. <source>Phyton-Internat. J. Exp. Bot.</source> <volume>93</volume>, <fpage>1275</fpage>&#x2013;<lpage>1296</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.32604/PHYTON.2024.052397</pub-id>
</citation></ref>
<ref id="B132">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Lu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Optimal training strategy for high-performance detection model of multi-cultivar tea shoots based on deep learning methods</article-title>. <source>Sci. Hortic.</source> <volume>328</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.SCIENTA.2024.112949</pub-id>
</citation></ref>
<ref id="B133">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Tao</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Cheng</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Qiao</surname> <given-names>H.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>GSD-YOLO: A lightweight decoupled wheat scab spore detection network based on yolov7-tiny</article-title>. <source>Agriculture-Basel</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture14122278</pub-id>
</citation></ref>
<ref id="B134">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>R. F.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Peng</surname> <given-names>J. L.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>H. L.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>IBSANet: A network for tomato leaf disease identification based on transfer learning with small samples</article-title>. <source>Appl. Sci.</source> <volume>13</volume>, <elocation-id>4348</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/APP13074348</pub-id>
</citation></ref>
<ref id="B135">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Jiang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>JutePest-YOLO: A deep learning network for jute pest identification and detection</article-title>. <source>IEEE Access.</source> <volume>12</volume>, <fpage>72938</fpage>&#x2013;<lpage>72956</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2024.3403491</pub-id>
</citation></ref>
<ref id="B136">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>E. X.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Lv</surname> <given-names>C.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>A lightweight dual-attention network for tomato leaf disease identification</article-title>. <source>Front. Plant Sci.</source> <volume>15</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/FPLS.2024.1420584</pub-id>, PMID: <pub-id pub-id-type="pmid">39166234</pub-id></citation></ref>
<ref id="B137">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>Y. K.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>G. X.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>A. B.</given-names>
</name>
<name>
<surname>He</surname> <given-names>M. F.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>Y. H.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>A precise apple leaf diseases detection using BCTNet under unconstrained environments</article-title>. <source>Comput. Electron. Agric.</source> <volume>212</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.COMPAG.2023.108132</pub-id>
</citation></ref>
<ref id="B138">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhao</surname> <given-names>Y. F.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Gao</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Song</surname> <given-names>W. L.</given-names>
</name>
<name>
<surname>Xiong</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>J. F.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Plant disease detection using generated leaves based on doubleGAN</article-title>. <source>IEEE/ACM Trans. Comput. Biol. Bioinf.</source> <volume>19</volume>, <fpage>1817</fpage>&#x2013;<lpage>1826</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TCBB.2021.3056683</pub-id>, PMID: <pub-id pub-id-type="pmid">33534712</pub-id></citation></ref>
<ref id="B139">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhao</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Multiple disease detection method for greenhouse-cultivated strawberry based on multiscale feature fusion Faster R-CNN</article-title>. <source>Comput. Electron. Agric.</source> <volume>199</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.COMPAG.2022.107176</pub-id>
</citation></ref>
<ref id="B140">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhou</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Lai</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Cai</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>B.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Automatic detection of rice blast fungus spores by deep learning-based object detection: models, benchmarks and quantitative analysis</article-title>. <source>Agriculture</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/AGRICULTURE14020290</pub-id>
</citation></ref>
<ref id="B141">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhou</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Krahenbuhl</surname> <given-names>P.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Objects as points</article-title>. <source>arXiv [preprint]</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.1904.07850</pub-id>
</citation></ref>
<ref id="B142">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhu</surname> <given-names>J. X.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>J. H.</given-names>
</name>
<name>
<surname>He</surname> <given-names>H. Y.</given-names>
</name>
<name>
<surname>Bai</surname> <given-names>W. H.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2025</year>a). <article-title>CBACA-YOLOv5: A symmetric and asymmetric attention-driven detection framework for citrus leaf disease identification</article-title>. <source>Symmetry-Basel</source> <volume>17</volume>, <elocation-id>617</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/sym17040617</pub-id>
</citation></ref>
<ref id="B143">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Qiu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2025</year>b). <article-title>An application of YOLOv8 integrated with attention mechanisms for detection of grape leaf black rot spots</article-title>. <source>PloS One</source> <volume>20</volume>, <fpage>e0321788</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0321788</pub-id>, PMID: <pub-id pub-id-type="pmid">40233077</pub-id></citation></ref>
</ref-list>
<glossary>
<title>Glossary</title>
<def-list>
<def-item>
<term>IEEE</term>
<def>
<p>Institute of Electrical and Electronics Engineers</p>
</def>
</def-item>
<def-item>
<term>CVPR</term>
<def>
<p>computer vision and pattern recognition</p>
</def>
</def-item>
<def-item>
<term>IDADP</term>
<def>
<p>agricultural disease and pest research database</p>
</def>
</def-item>
<def-item>
<term>PDDB</term>
<def>
<p>plant disease symptom database</p>
</def>
</def-item>
<def-item>
<term>AdaBoost</term>
<def>
<p>adaptive boosting</p>
</def>
</def-item>
<def-item>
<term>SVM</term>
<def>
<p>support vector machine</p>
</def>
</def-item>
<def-item>
<term>R-CNN</term>
<def>
<p>region-based convolutional neural networks</p>
</def>
</def-item>
<def-item>
<term>YOLO</term>
<def>
<p>you only look once</p>
</def>
</def-item>
<def-item>
<term>SSD</term>
<def>
<p>single shot multi-box detector</p>
</def>
</def-item>
<def-item>
<term>PWD</term>
<def>
<p>pine wilt disease</p>
</def>
</def-item>
<def-item>
<term>RPN</term>
<def>
<p>region proposal network</p>
</def>
</def-item>
<def-item>
<term>RoIAlign</term>
<def>
<p>region of interest align</p>
</def>
</def-item>
<def-item>
<term>RoIPool</term>
<def>
<p>region of interest pooling</p>
</def>
</def-item>
<def-item>
<term>CBAM</term>
<def>
<p>convolutional block attention module</p>
</def>
</def-item>
<def-item>
<term>CNN</term>
<def>
<p>convolutional neural network</p>
</def>
</def-item>
<def-item>
<term>VGG</term>
<def>
<p>visual geometry group</p>
</def>
</def-item>
<def-item>
<term>ECA</term>
<def>
<p>efficient channel attention</p>
</def>
</def-item>
<def-item>
<term>IoU</term>
<def>
<p>intersection over union</p>
</def>
</def-item>
<def-item>
<term>SE</term>
<def>
<p>squeeze and excitation</p>
</def>
</def-item>
<def-item>
<term>mAP@0.5</term>
<def>
<p>mean average precision at IoU=0.5</p>
</def>
</def-item>
<def-item>
<term>API</term>
<def>
<p>application programming interface</p>
</def>
</def-item>
<def-item>
<term>CapsNet</term>
<def>
<p>capsule neural network</p>
</def>
</def-item>
<def-item>
<term>DCNN</term>
<def>
<p>deep convolutional neural network</p>
</def>
</def-item>
<def-item>
<term>IOT</term>
<def>
<p>internet of things</p>
</def>
</def-item>
<def-item>
<term>AI</term>
<def>
<p>artificial intelligence</p>
</def>
</def-item>
<def-item>
<term>NAS</term>
<def>
<p>neural architecture search</p>
</def>
</def-item>
</def-list>
</glossary>
</back>
</article>