<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="review-article" dtd-version="1.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2025.1737208</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Review</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Lightweight deep learning for tomato disease detection: trends, challenges, and edge AI perspectives</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Gunasekaran</surname><given-names>Harshinisree</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3259953/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Rajkumar</surname><given-names>Sujatha</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>*</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Kirubhadharsini B.</surname><given-names>Lincy</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>*</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3036608/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>School of Biosciences and Technology (SBST), Vellore Institute of Technology (VIT)</institution>, <city>Vellore</city>,&#xa0;<country country="in">India</country></aff>
<aff id="aff2"><label>2</label><institution>VIT School of Electronics Engineering (SENSE), Vellore Institute of Technology (VIT)</institution>, <city>Vellore</city>,&#xa0;<country country="in">India</country></aff>
<aff id="aff3"><label>3</label><institution>VIT School of Agricultural Innovations and Advanced Learning (VAIAL), Vellore Institute of Technology (VIT)</institution>, <city>Vellore</city>,&#xa0;<country country="in">India</country></aff>
<author-notes>
<corresp id="c001"><label>*</label>Correspondence: Sujatha Rajkumar, <email xlink:href="mailto:sujatha.r@vit.ac.in">sujatha.r@vit.ac.in</email>; Lincy Kirubhadharsini B., <email xlink:href="mailto:lincy.b@vit.ac.in">lincy.b@vit.ac.in</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-12">
<day>12</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2025</year>
</pub-date>
<volume>16</volume>
<elocation-id>1737208</elocation-id>
<history>
<date date-type="received">
<day>03</day>
<month>11</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>29</day>
<month>12</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>14</day>
<month>12</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2026 Gunasekaran, Rajkumar and Kirubhadharsini B.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Gunasekaran, Rajkumar and Kirubhadharsini B.</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-12">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>Tomato (<italic>Solanum lycopersicum</italic>) is a globally cultivated horticultural crop, yet its productivity is severely constrained by foliar and insect-vectored diseases that reduce its quality and production. Early and accurate diagnosis of these diseases, along with sustainable biocontrol strategies, is essential for improving crop health and reducing economic losses. This review synthesizes and evaluates the recent progress in lightweight deep learning models and edge AI for tomato disease detection, highlighting their potential for practical deployment in precision agriculture. A comprehensive survey of recent literature was conducted, which covers convolutional neural networks, transformer-based models, optimization techniques including pruning, quantization, and knowledge distillation, and the use of explainable AI tools to enhance transparency and trust. In addition, experimental validation was performed by utilizing MobileNetV2 and EfficientNetB0 on a subset of tomato diseases that are most common and prevalent in Tamil Nadu. The test performance of both the models resulted in an overall accuracy of 99.9% and a macro-F1 of nearly 0.99. Further, a unique framework that combines AI-powered diagnosis with microbial biocontrol recommendations is proposed, offering a solution to manage diseases in an eco-friendly and region-specific way. Overall, this work provides a roadmap for combining sustainable methods with AI-driven diagnosis, promoting resilient, scalable, and farmer-friendly agricultural systems.</p>
</abstract>
<kwd-group>
<kwd>deep learning</kwd>
<kwd>edge AI</kwd>
<kwd>insect vectored disease</kwd>
<kwd>lightweight models</kwd>
<kwd>tomato leaf disease</kwd>
<kwd>transformer models</kwd>
<kwd>uncertainty quantification</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was not received for this work and/or its publication.</funding-statement>
</funding-group>
<counts>
<fig-count count="8"/>
<table-count count="11"/>
<equation-count count="0"/>
<ref-count count="66"/>
<page-count count="19"/>
<word-count count="10146"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Sustainable and Intelligent Phytoprotection</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Tomato (<italic>Solanum lycopersicum</italic>) is a significant agricultural crop that is extensively grown for its nutritional value, economic significance and use in both fresh and processed food systems. It is an essential crop to rural livelihoods and food security particularly in nations like India and it grows across a variety of agro-climatic zones (<xref ref-type="bibr" rid="B45">Mohanty et&#xa0;al., 2016</xref>). Although there are improvements in agronomic practices and input management, tomato production is hindered by a variety of foliar and insect-vectored diseases such as early blight (<italic>Alternaria solani</italic>), bacterial spot (<italic>Xanthomonas</italic> spp.), septoria leaf spot (<italic>Septoria lycopersici</italic>), and tomato yellow leaf curl virus (TYLCV) (<xref ref-type="bibr" rid="B14">Durmu&#x15f; et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B18">Fuentes et&#xa0;al., 2017</xref>).</p>
<p>Changing climatic patterns, monoculture systems and the increased resistance of pests and pathogens to chemical controls contribute to the increased frequency and severity of these diseases (<xref ref-type="bibr" rid="B43">Ma et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B5">Bebber et&#xa0;al., 2013</xref>; <xref ref-type="bibr" rid="B19">Garrett et&#xa0;al., 2006</xref>). In countries like India, millions of smallholder farmers practise tomato farming, where delayed disease detection often results in substantial yield losses, reduced fruit quality and unforeseen financial difficulties.</p>
<p>Traditionally, disease diagnosis has often depended on manual inspection by experienced personnel, which is labor-intensive, time-consuming and mostly inconsistent in field conditions. Also, the lack of trained professionals in rural and remote areas has resulted in overuse of agrochemicals as well as delayed or inaccurate disease diagnosis (<xref ref-type="bibr" rid="B3">Ahmed et&#xa0;al., 2022</xref>). Despite the continued usage of chemical pesticides, their environmental risks and decreasing effectiveness due to resistance have forced farmers to look for more sustainable and intelligent solutions (<xref ref-type="bibr" rid="B52">Pretty and Bharucha, 2015</xref>).</p>
<p>Deep Learning (DL) has become a breakthrough in plant disease diagnostics over the last ten years, allowing for the automated, highly accurate diagnosis of leaf symptoms from digital images. A broad range of tomato diseases have been classified using Convolutional neural networks (CNNs) like ResNet, VGG and MobileNet (<xref ref-type="bibr" rid="B16">Ferentinos, 2018</xref>; <xref ref-type="bibr" rid="B61">Too et&#xa0;al., 2019</xref>). In recent years, vision transformers (ViTs) and hybrid models have gained attention due to their ability to extract global feature representations and long-range dependencies, resulting in classification robustness under variable conditions (<xref ref-type="bibr" rid="B47">Nishankar et&#xa0;al., 2025</xref>; <xref ref-type="bibr" rid="B4">Alshammari, 2024</xref>).</p>
<p>However, the computational demands of these models are a major obstacle to their widespread adoption. Most high-performing DL models are optimized for powerful GPUs or cloud environments, which are often inaccessible in the field (<xref ref-type="bibr" rid="B28">Howard et&#xa0;al., 2017</xref>). This has led to a shift in focus toward lightweight architectures and optimization strategies such as pruning (removing unnecessary model parameters to reduce size and speed up inference), quantization (reducing the numerical precision of model parameters to speed up inference on edge devices) and knowledge distillation (a method where a smaller model learns from a larger, high-capacity model to improve efficiency) that enable real-time inference on low-power, embedded edge devices like smartphones, Raspberry Pi and NVIDIA Jetson Nano (<xref ref-type="bibr" rid="B22">Han et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B28">Howard et&#xa0;al., 2017</xref>).</p>
<p>In Indian agriculture, edge AI-based diagnostic tools have immense potential. Their offline operation, portability and affordability make them ideal for use in rural farming communities where Internet connectivity is inconsistent and infrastructure is minimal. The opportunity to make these tools available as mobile applications tailored in local languages can help farmers get the right information at the right time to make timely decisions.</p>
<p>Even with these significant advancements, current AI models often suffer from dataset imbalance, image variability, poor generalization across field environments, and overlapping visual symptoms. According to <xref ref-type="bibr" rid="B46">Mustofa et&#xa0;al., 2023</xref>, most benchmark datasets utilized for training have low diversity and are frequently recorded in controlled environments, which restricts their application in real-world conditions. As a result, gathering field-representative datasets and validating models in real-world settings are increasingly important.</p>
<p>Furthermore, as these deep learning models are involved in important agricultural decisions, their successful adoption depends on their transparency and adaptability, so that farmers can trust and interpret its decisions. Researchers and practitioners may visualize model attention and determine which region of the leaf influences predictions with tools like Grad-CAM and SHAP, increasing model transparency and user trust (<xref ref-type="bibr" rid="B56">Selvaraju et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B42">Lundberg and Lee, 2017</xref>).</p>
<p>Integration of AI diagnostics with microbial biocontrol strategies is another crucial part but unexplored aspect of tomato disease management. Beneficial microorganisms including <italic>Bacillus subtilis</italic>, <italic>Trichoderma</italic> spp., and <italic>Beauveria bassiana</italic> have shown great potential in suppressing plant pathogens and insect vectors while boosting plant resistance (<xref ref-type="bibr" rid="B10">Chowdhury et&#xa0;al., 2015</xref>; <xref ref-type="bibr" rid="B37">Lacey et&#xa0;al., 2015</xref>). Eco-friendly and closed loop disease management, especially in sustainable and organic farming systems could be supported by combining real-time disease detection with targeted biocontrol applications.</p>
<p>Furthermore, recent studies indicate a connection between the dynamics of pest infestation and plant brix levels or sugar content. This link shows the possibility of integrated monitoring frameworks that assist early warning systems and proactive biocontrol deployment by combining plant physiological indicators with image-based disease identification.</p>
<p>This review intends to address the important gaps and opportunities in AI-based tomato disease detection through the following objectives:</p>
<list list-type="bullet">
<list-item>
<p>To provide a detailed analysis of lightweight deep learning architectures developed for tomato disease diagnosis.</p></list-item>
<list-item>
<p>To offer a comparative overview of CNNs and transformer-based models in plant pathology.</p></list-item>
<list-item>
<p>To outline and analyze the benchmark datasets commonly used in tomato disease detection research.</p></list-item>
<list-item>
<p>To evaluate the practical feasibility and potential of edge AI deployments for in-field diagnosis.</p></list-item>
<list-item>
<p>To integrate perspectives on AI-driven diagnostics with microbial biocontrol strategies for sustainable disease management.</p></list-item>
<list-item>
<p>To highlight future research directions for the development of robust, scalable and farmer-accessible diagnostic tools tailored to tomato crop health monitoring.</p></list-item>
</list>
<p><xref ref-type="fig" rid="f1"><bold>Figure&#xa0;1</bold></xref> shows the structure of the tomato plant and the main areas affected by foliar pathogens and insect vectors.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Schematic diagram of the tomato plant showing disease-prone regions by type of pathogen. <bold>(a)</bold> Young apical leaves are common entry points for insect-vectored viral infections such as TYLCV and TSWV. <bold>(b)</bold> Leaves are primary targets of fungal pathogens (e.g., early blight, septoria) and bacterial (e.g., bacterial spot, speck) diseases. <bold>(c)</bold> Fruits are affected by secondary fungal and bacterial pathogens. <bold>(d)</bold> Stems and vascular tissues are particularly susceptible to bacterial wilt.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1737208-g001.tif">
<alt-text content-type="machine-generated">Illustration of a tomato plant with four parts labeled using arrows: (a) points to the upper leaves, (b) points to flowering stems, (c) points to red tomatoes, and (d) points to the main stem above the root system.</alt-text>
</graphic></fig>
</sec>
<sec id="s2">
<label>2</label>
<title>Background</title>
<p>Tomato (<italic>Solanum lycopersicum</italic>) is cultivated worldwide and holds considerable economic importance globally due to its high nutritional content and versatility in both fresh and processed forms. However, tomato cultivation is frequently challenged by a variety of diseases that affect various plant organs, including leaves, stems, and fruits. These diseases are mostly triggered by fungal, bacterial, and viral pathogens, resulting in severe yield losses, decreased fruit quality and significant financial losses for farmers.</p>
<p>Global agricultural estimates indicate that tomato crop diseases cause an annual production loss of up to 30%, with economic damages amounting to billions of dollars globally (<xref ref-type="bibr" rid="B17">FAO, 2021</xref>). Outbreaks of devastating diseases such as late blight and TYLCV can result in complete crop failure in affected regions. In addition to yield loss, indirect factors like reduced fruit quality, increased pesticide reliance, added labor, and regulatory hurdles significantly increase the financial pressure on both smallholder and commercial farmers.</p>
<sec id="s2_1">
<label>2.1</label>
<title>Major disease categories and their impact</title>
<p>Tomato diseases can be broadly categorized according to their causal organisms such as fungal, bacterial and insect-transmitted viral pathogens. Fungal diseases such as early blight (<italic>Alternaria solani</italic>), late blight (<italic>Phytophthora infestans</italic>), septoria leaf spot (<italic>Septoria lycopersici</italic>), leaf mold (<italic>Fulvia fulva</italic>), and Fusarium wilt (<italic>Fusarium oxysporum</italic> f. sp. <italic>lycopersici</italic>) primarily affect the foliage and vascular systems, leading to defoliation, wilting and fruit rot (<xref ref-type="bibr" rid="B33">Jones et&#xa0;al., 2014</xref>; <xref ref-type="bibr" rid="B48">Nowicki et&#xa0;al., 2012</xref>).</p>
<p>Bacterial infections including bacterial spot (<italic>Xanthomonas</italic> spp.), bacterial speck (<italic>Pseudomonas syringae</italic>), bacterial canker (<italic>Clavibacter michiganensis</italic>), and bacterial wilt (<italic>Ralstonia solanacearum</italic>) disrupt photosynthesis, affect the vascular flow, and reduce overall plant vigor (<xref ref-type="bibr" rid="B26">Hayward, 1991</xref>; <xref ref-type="bibr" rid="B59">Timilsina et&#xa0;al., 2020</xref>). Insect-vectored viral diseases, such as Tomato Yellow Leaf Curl Virus (TYLCV), Tomato Spotted Wilt Virus (TSWV), and Tomato Chlorosis Virus (ToCV), spread rapidly through vectors like whiteflies and thrips, causing symptoms like leaf curling, interveinal chlorosis, and stunted growth (<xref ref-type="bibr" rid="B23">Hanssen et&#xa0;al., 2010</xref>; <xref ref-type="bibr" rid="B38">Lapidot and Friedmann, 2002</xref>).</p>
<p><xref ref-type="fig" rid="f2"><bold>Figure&#xa0;2</bold></xref> shows the visual categorization of tomato plant diseases and <xref ref-type="table" rid="T1"><bold>Table&#xa0;1</bold></xref> summarizes their symptoms, mode of transmission and geographical relevance.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Major tomato plant diseases categorized by type of pathogen.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1737208-g002.tif">
<alt-text content-type="machine-generated">Infographic showing types of tomato plant diseases categorized as fungal, bacterial, or viral. Fungal includes early blight, septoria leaf spot, late blight, leaf mold, fusarium wilt; bacterial includes bacterial spot, speck, canker, and wilt; viral includes tomato yellow leaf curl virus, tomato spotted wilt virus, tomato chlorosis virus. Illustrations of a green fungus, pink bacteria, and orange virus represent each category.</alt-text>
</graphic></fig>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Summary of major tomato plant diseases.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Disease</th>
<th valign="middle" align="center">Causal organism</th>
<th valign="middle" align="center">Category</th>
<th valign="middle" align="center">Key symptoms</th>
<th valign="middle" align="center">Transmission</th>
<th valign="middle" align="center">Geographical impact</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">Early Blight</td>
<td valign="middle" align="left"><italic>Alternaria solani</italic></td>
<td valign="middle" align="left">Fungal</td>
<td valign="middle" align="left">Concentric brown spots on lower leaves; defoliation</td>
<td valign="middle" align="left">Airborne spores</td>
<td valign="middle" align="left">Global, especially warm regions</td>
</tr>
<tr>
<td valign="middle" align="left">Late Blight</td>
<td valign="middle" align="left"><italic>Phytophthora infestans</italic></td>
<td valign="middle" align="left">Oomycete (fungal-like)</td>
<td valign="middle" align="left">Water-soaked lesions with white mold underside</td>
<td valign="middle" align="left">Air and rain-borne spores</td>
<td valign="middle" align="left">Cool, moist areas</td>
</tr>
<tr>
<td valign="middle" align="left">Septoria Leaf Spot</td>
<td valign="middle" align="left"><italic>Septoria lycopersici</italic></td>
<td valign="middle" align="left">Fungal</td>
<td valign="middle" align="left">Numerous small, gray-centered leaf spots</td>
<td valign="middle" align="left">Rain splash, wind</td>
<td valign="middle" align="left">Humid climates worldwide</td>
</tr>
<tr>
<td valign="middle" align="left">Leaf Mold</td>
<td valign="middle" align="left"><italic>Fulvia fulva</italic></td>
<td valign="middle" align="left">Fungal</td>
<td valign="middle" align="left">Yellow patches, olive mold on leaf underside</td>
<td valign="middle" align="left">Wind-dispersed spores</td>
<td valign="middle" align="left">Greenhouse and humid regions</td>
</tr>
<tr>
<td valign="middle" align="left">Fusarium Wilt</td>
<td valign="middle" align="left"><italic>Fusarium oxysporum</italic> f. sp. <italic>lycopersici</italic></td>
<td valign="middle" align="left">Fungal</td>
<td valign="middle" align="left">Yellowing, wilting of lower leaves; vascular browning</td>
<td valign="middle" align="left">Soil-borne</td>
<td valign="middle" align="left">Warm tropics and subtropics</td>
</tr>
<tr>
<td valign="middle" align="left">Bacterial Spot</td>
<td valign="middle" align="left"><italic>Xanthomonas</italic> spp.</td>
<td valign="middle" align="left">Bacterial</td>
<td valign="middle" align="left">Brown lesions on leaves, fruits; leaf drop</td>
<td valign="middle" align="left">Splashing water, tools</td>
<td valign="middle" align="left">Worldwide</td>
</tr>
<tr>
<td valign="middle" align="left">Bacterial Speck</td>
<td valign="middle" align="left"><italic>Pseudomonas syringae</italic> pv. <italic>tomato</italic></td>
<td valign="middle" align="left">Bacterial</td>
<td valign="middle" align="left">Tiny black spots on fruit and foliage</td>
<td valign="middle" align="left">Rain splash, seed-borne</td>
<td valign="middle" align="left">Global</td>
</tr>
<tr>
<td valign="middle" align="left">Bacterial Canker</td>
<td valign="middle" align="left"><italic>Clavibacter michiganensis</italic> subsp. <italic>michiganensis</italic></td>
<td valign="middle" align="left">Bacterial</td>
<td valign="middle" align="left">Wilting, edge burn, cankers on stems</td>
<td valign="middle" align="left">Seed-borne, wounds</td>
<td valign="middle" align="left">North America, Europe</td>
</tr>
<tr>
<td valign="middle" align="left">Bacterial Wilt</td>
<td valign="middle" align="left"><italic>Ralstonia solanacearum</italic></td>
<td valign="middle" align="left">Bacterial</td>
<td valign="middle" align="left">Sudden wilting, vascular browning, bacterial ooze</td>
<td valign="middle" align="left">Soil and water</td>
<td valign="middle" align="left">Asia, Africa, tropics</td>
</tr>
<tr>
<td valign="middle" align="left">TYLCV</td>
<td valign="middle" align="left">Tomato Yellow Leaf Curl Virus</td>
<td valign="middle" align="left">Viral</td>
<td valign="middle" align="left">Leaf curling, yellowing, stunting</td>
<td valign="middle" align="left">Whitefly vector</td>
<td valign="middle" align="left">Asia, Africa, Americas</td>
</tr>
<tr>
<td valign="middle" align="left">TSWV</td>
<td valign="middle" align="left">Tomato Spotted Wilt Virus</td>
<td valign="middle" align="left">Viral</td>
<td valign="middle" align="left">Necrotic spots, bronzing, ring spots</td>
<td valign="middle" align="left">Thrips vector</td>
<td valign="middle" align="left">Worldwide</td>
</tr>
<tr>
<td valign="middle" align="left">ToCV</td>
<td valign="middle" align="left">Tomato Chlorosis Virus</td>
<td valign="middle" align="left">Viral</td>
<td valign="middle" align="left">Interveinal chlorosis, leaf thickening</td>
<td valign="middle" align="left">Whitefly vector</td>
<td valign="middle" align="left">Europe, Americas, Asia</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Disease distribution patterns are significantly influenced by regional climatic conditions and crop management practices. For example, fungal diseases like early blight and septoria flourish in temperate and humid conditions, whereas in tropical and sub-tropical zones, the incidence of bacterial wilt and TYLCV is high due to excessive humidity and vector activity. Alterations in pathogen life cycles and extended vector habitats due to climate change also contribute to the onset of diseases (<xref ref-type="bibr" rid="B19">Garrett et&#xa0;al., 2006</xref>).</p>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>Limitations of traditional disease management</title>
<p>Manual scouting, visual symptom evaluation and laboratory-based confirmation using PCR, ELISA or other culturing techniques are the primary approaches in traditional disease diagnosis. Due to the subjective nature of human observation and the similarity of symptoms across diseases, these methods are labor-intensive, time-consuming and often inaccurate (<xref ref-type="bibr" rid="B7">Bock et&#xa0;al., 2010</xref>). Moreover, many rural and low-resource agricultural areas lack access to proper diagnostic labs and plant pathology experts.</p>
<p>For quick disease suppression, farmers often turn to chemical pesticides. However, excessive and improper use of agrochemicals has increased pest resistance and contributed to environmental pollution, and consumer demand for chemical-free alternatives is still growing (<xref ref-type="bibr" rid="B51">Pimentel and Burgess, 2014</xref>). Climate variability complicates the situation even more by altering pathogen life cycles and insect vector dynamics, making disease outbreaks more unpredictable.</p>
<p>The tomato&#x2019;s widespread cultivation, rich image-based symptom manifestation and economic significance have made it a model crop for AI-based plant disease diagnosis. Additionally, a number of publicly accessible annotated datasets like PlantVillage, Tomato Leaf Disease Dataset, and TDDS provide standardized resources for training and benchmarking deep learning models, making tomato a perfect model system for assessing AI methods in agriculture.</p>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>Rise of deep learning in plant pathology</title>
<p>Recent advances in artificial intelligence (AI), especially deep learning (DL), have revolutionized disease diagnosis in the agriculture industry. Convolutional Neural Networks (CNNs), in particular, have been widely utilized to identify and categorize plant diseases from leaf images with high accuracy (<xref ref-type="bibr" rid="B45">Mohanty et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B16">Ferentinos, 2018</xref>). These models are useful tools for automating plant health monitoring, because they can recognize intricate visual patterns including color, texture and lesion shape.</p>
<p>Despite their popularity, traditional deep learning models sometimes require powerful GPUs and cloud-based servers due to their high processing demands. Their adoption in field settings is constrained by these limitations, especially in locations with limited resources (<xref ref-type="bibr" rid="B34">Kamilaris and Prenafeta-Bold&#xfa;, 2018</xref>). As a result, research interest in lightweight architectures such as MobileNet, EfficientNet-Lite and SqueezeNet has increased.</p>
</sec>
<sec id="s2_4">
<label>2.4</label>
<title>Edge AI and lightweight deep learning</title>
<p>Edge AI refers to deploying AI models directly on devices such as smartphones, drones, or microcontrollers (e.g., Raspberry Pi, Jetson Nano) without the need for the Internet or cloud access. Edge AI in agriculture facilitates faster disease detection, minimizes the need for cloud connectivity and continues to be useful even in remote farming areas. To support edge deployment, researchers are applying model compression techniques such as pruning, quantization and knowledge distillation to reduce the model size and energy demand while retaining performance (<xref ref-type="bibr" rid="B22">Han et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B9">Cheng et&#xa0;al., 2017</xref>). Transformer-based models like ViT (Vision Transformer) are also being adapted to lightweight formats for plant disease diagnosis (<xref ref-type="bibr" rid="B13">Dosovitskiy et&#xa0;al., 2021</xref>).</p>
</sec>
<sec id="s2_5">
<label>2.5</label>
<title>Microbial biocontrol as a sustainable strategy</title>
<p>In parallel with advances in AI, microbial biocontrol has gained attention as a sustainable alternative to chemical pesticides. Beneficial microorganisms such as <italic>Bacillus subtilis</italic>, <italic>Trichoderma harzianum</italic> and <italic>Beauveria bassiana</italic> provide defense mechanisms that include:</p>
<list list-type="bullet">
<list-item>
<p>Antibiosis</p></list-item>
<list-item>
<p>Competing for space and nutrition</p></list-item>
<list-item>
<p>Activation of plant immunological responses.</p></list-item>
<list-item>
<p>Direct pathogen or insect vector parasitism (<xref ref-type="bibr" rid="B21">Guzm&#xe1;n-Guzm&#xe1;n et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B6">Ben&#xed;tez et&#xa0;al., 2004</xref>)</p></list-item>
</list>
<p>According to field studies, microbial biocontrol agents can improve plant growth, minimize disease severity and reduce the reliance on synthetic chemicals (<xref ref-type="bibr" rid="B2">Agha et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B32">Jiao et&#xa0;al., 2021</xref>). AI-powered early detection systems are a useful addition to existing systems but their efficacy primarily depends on timely and accurate application. The development of integrated, field-deployable solutions (addressed in the next section) is made possible by the integration of biological control strategies and AI-based disease diagnosis, which together offer a strong and long-lasting framework for precision crop protection.</p>
</sec>
</sec>
<sec id="s3">
<label>3</label>
<title>Recent advances in lightweight deep learning for tomato disease detection</title>
<sec id="s3_1">
<label>3.1</label>
<title>Deep learning architectures for tomato disease detection</title>
<p>Recent developments in deep learning (DL) have greatly enhanced computer vision and agricultural diagnostics, especially in crop disease identification. Among these, tomato disease detection has become an important area of study due to the availability of annotated datasets, visual symptom distinctiveness and the economic importance of the crop. Deep learning models, particularly Convolutional Neural Networks (CNNs) and, more recently, Vision Transformers (ViTs), have shown superior performance in image-based plant pathology tasks, including tomato leaf disease classification, severity estimation and lesion segmentation.</p>
<sec id="s3_1_1">
<label>3.1.1</label>
<title>Publicly available datasets</title>
<p>The efficacy of deep learning models for tomato disease identification highly depends on the quality, diversity and annotation accuracy of the training datasets. Most models are trained on image-based datasets containing leaf samples captured in controlled environments. However, issues such as dataset bias, class imbalance and limited representation of real-field conditions remain major challenges. The commonly used datasets for tomato disease diagnoses are listed in <xref ref-type="table" rid="T2"><bold>Table&#xa0;2</bold></xref>.</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>List of benchmark datasets for tomato leaf disease diagnoses.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Dataset name</th>
<th valign="middle" align="center">Source/ platform</th>
<th valign="middle" align="center">Tomato classes</th>
<th valign="middle" align="center">Total tomato leaf images</th>
<th valign="middle" align="center">Diseases covered</th>
<th valign="middle" align="center">Notes</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">PlantVillage</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B29">Hughes and Salath&#xe9;, 2015</xref></td>
<td valign="middle" align="left">10</td>
<td valign="middle" align="left">5,868</td>
<td valign="middle" align="left">Early blight, late blight, bacterial spot, mosaic, etc.</td>
<td valign="middle" align="left">Publicly cited, lab+greenhouse</td>
</tr>
<tr>
<td valign="middle" align="left">Tomato Leaf Disease Dataset</td>
<td valign="middle" align="left">Kaggle</td>
<td valign="middle" align="left">10</td>
<td valign="middle" align="left">15,125</td>
<td valign="middle" align="left">Includes blight, mold, virus, bacterial spotted varieties</td>
<td valign="middle" align="left">Standard benchmark</td>
</tr>
<tr>
<td valign="middle" align="left">Mendeley Tomato Dataset</td>
<td valign="middle" align="left">Mendeley Data</td>
<td valign="middle" align="left">9</td>
<td valign="middle" align="left">10,000</td>
<td valign="middle" align="left">Commercial/casual field images of fungal and viral diseases</td>
<td valign="middle" align="left">Includes background noise support</td>
</tr>
<tr>
<td valign="middle" align="left">AI Challenger Agriculture</td>
<td valign="middle" align="left">AI Challenger competition data (2018) (China)</td>
<td valign="middle" align="left">12</td>
<td valign="middle" align="left">25,000+</td>
<td valign="middle" align="left">Multiple crops; sub-set has tomato diseases</td>
<td valign="middle" align="left">Challenging real-scene complexity</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_1_2">
<label>3.1.2</label>
<title>Convolutional neural networks</title>
<p>CNNs formed the basis for plant disease identification from images, because of their ability to learn hierarchical spatial features automatically. Architectures like AlexNet, GoogLeNet, VGGNet, ResNet and Inception have consistently shown high performance in detecting plant and tomato-specific diseases. <xref ref-type="fig" rid="f3"><bold>Figure&#xa0;3</bold></xref> illustrates the typical architecture and flow of CNNs in disease prediction. In the earliest studies, <xref ref-type="bibr" rid="B45">Mohanty et&#xa0;al. (2016)</xref> trained AlexNet and GoogLeNet on the PlantVillage dataset, achieving over 99% classification accuracy across 38 disease classes, including multiple tomato diseases. This pioneering work established the feasibility of using deep CNNs for automated plant pathology.</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>Structure of a Convolutional Neural Network (CNN).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1737208-g003.tif">
<alt-text content-type="machine-generated">Diagram illustrating a convolutional neural network workflow for plant disease classification, showing a tomato plant image processed through convolution, pooling, flatten, and fully connected layers, producing output probabilities for bacterial wilt and early blight.</alt-text>
</graphic></fig>
<p>Building on this, <xref ref-type="bibr" rid="B16">Ferentinos (2018)</xref> assessed five CNN models (AlexNet, VGG, GoogLeNet, ResNet, and LeNet) using a large dataset containing 87,848 images of healthy and diseased plant leaves, including tomato. The study found that classification accuracies, especially with AlexNet and ResNet, exceeded 99.53%. These results demonstrate the deep CNN&#x2019;s robust learning ability when trained on large, well-curated datasets.</p>
<p>However, these models comprise tens of millions of parameters and rely on powerful GPUs or cloud-based infrastructure. For example, ResNet-50 has about 25.6 million parameters, which makes real-time inference on embedded devices or mobile phones difficult. The deployment of AI models in remote or resource-constrained agricultural settings, where such infrastructure is unavailable, is limited by this high computational demand (<xref ref-type="bibr" rid="B34">Kamilaris and Prenafeta-Bold&#xfa;, 2018</xref>).</p>
<p>In order to address this gap, researchers have switched to lightweight CNN architectures that retain accuracy while reducing complexity, an approach we explore next.</p>
</sec>
<sec id="s3_1_3">
<label>3.1.3</label>
<title>Lightweight CNNs</title>
<p>Even though traditional CNN models perform well, their large model size and high computational demands make them unsuitable for low-power device deployment, particularly in remote agricultural regions. In order to overcome these challenges, lightweight CNNs that balance efficiency and performance have been developed, allowing real-time inference on edge devices, namely Raspberry Pi, NVIDIA Jetson Nano and smartphones. <xref ref-type="bibr" rid="B55">Sandler et&#xa0;al. (2018)</xref>, developed MobileNetV2, which significantly reduces model parameters and computation by using depthwise separable convolutions and inverted residuals. The work by <xref ref-type="bibr" rid="B3">Ahmed et&#xa0;al., 2022</xref>, using a lightweight MobileNetV2-based architecture, achieved 99.3% accuracy, with a reduced computational footprint, on the Plant Village dataset (tomato). It is among the most popular models for mobile-based disease diagnosis due to its high efficiency and small memory footprint. <xref ref-type="bibr" rid="B57">Tan and Le (2019)</xref>, introduced EfficientNet-B0, which uses a compound scaling technique to scale network depth, width and resolution. This helps to make better use of the model&#x2019;s capacity without significantly adding complexity. EfficientNet-B0 with an integrated attention module (EfficientNet-B0-Attn) demonstrated high performance and efficiency appropriate for edge deployment, achieving 99.39% accuracy on plant disease classification (<xref ref-type="bibr" rid="B20">Gonz&#xe1;lez-Briones et&#xa0;al., 2025</xref>).</p>
<p>The efficacy of a siamese network-based lightweight framework for edge-AI deployment was demonstrated by its 96.97% accuracy on the tomato subset of the Plant Village dataset with roughly 2.96 million parameters (<xref ref-type="bibr" rid="B58">Thuseethan et&#xa0;al., 2024</xref>). These lightweight models represent a key advance in expanding access to AI-based disease diagnostics, especially for smallholder farmers in regions with limited data availability. <xref ref-type="fig" rid="f4"><bold>Figure&#xa0;4</bold></xref> illustrates the overall pipeline for DL based tomato disease detection.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Overall workflow of deep learning-based tomato disease detection.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1737208-g004.tif">
<alt-text content-type="machine-generated">Flowchart illustrates a machine learning pipeline for image classification with labeled steps: data collection, data preprocessing, model training with convolutional neural network layers, hyperparameter optimization, and evaluation, using icons to represent each phase and showing information flow with arrows.</alt-text>
</graphic></fig>
</sec>
<sec id="s3_1_4">
<label>3.1.4</label>
<title>Transformer-based and hybrid architectures: the rise of attention mechanisms in plant disease diagnosis</title>
<p>While CNNs are extremely effective in extracting local spatial features, they usually fail to extract long-range dependencies and global context, particularly in complex field images where background noise and overlapping symptoms are common. To address these limitations, transformer-based architectures initially created for natural language processing were utilized for computer vision tasks, including plant disease classification.</p>
<p><xref ref-type="bibr" rid="B13">Dosovitskiy et&#xa0;al. (2021)</xref>, introduced the Vision Transformer (ViT), which uses a self-attention mechanism to process image patches as sequences, thereby capturing both local and global interactions without the need for convolutions. On a balanced diverse tomato dataset, a multispectral approach utilizing ViT-B16 produced strong overall performance with average scores of 83.3% accuracy, 90.1% precision, 90.75% recall and 89.5% F1-score, demonstrating the efficacy of Vision Transformers in plant disease detection (<xref ref-type="bibr" rid="B12">De Silva and Brown, 2023</xref>). However, it is challenging to operate on low-power edge devices due to the model&#x2019;s large size (about 86 million parameters), which results in significant processing demands.</p>
<p>To address this, researchers have developed MobileViT, a lightweight hybrid architecture that combines the inductive learning of CNNs with global modeling capability of transformers. MobileViT introduces local-global fusion blocks, which allow the model to achieve high accuracy while maintaining computational efficiency. Based on this concept, a compact Vision Transformer (PMVT), modeled after MobileViT, demonstrated its suitability for mobile and resource-constrained deployment by achieving 94.9% accuracy with just 5.06 million parameters on plant disease detection tasks (<xref ref-type="bibr" rid="B40">Li G. et&#xa0;al., 2023</xref>). In order to achieve a strong balance of generalization, inference speed and overall performance, hybrid architectures such as CoAtNet (<xref ref-type="bibr" rid="B11">Dai et&#xa0;al., 2021</xref>) and Conformer (<xref ref-type="bibr" rid="B50">Peng et&#xa0;al., 2021</xref>) combine convolutional modules in early stages with self-attention mechanisms in deeper layers. These models offer great potential for future agricultural AI applications, even if they haven&#x2019;t been thoroughly tested on tomato-specific datasets. The primary deep learning architectures for tomato disease detection are compiled in <xref ref-type="table" rid="T3"><bold>Table&#xa0;3</bold></xref> which also highlights the dataset used, parameter size, accuracy, and edge deployment suitability. The Transformer-based and hybrid architectures constitute a paradigm shift in AI for agriculture, offering enhanced generalization, interpretability and stability in challenging real-world situations.</p>
<table-wrap id="T3" position="float">
<label>Table&#xa0;3</label>
<caption>
<p>Summary of deep learning models for tomato disease detection.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Model</th>
<th valign="middle" align="center">Architecture type</th>
<th valign="middle" align="center">Parameters (Millions)</th>
<th valign="middle" align="center">Accuracy (%)</th>
<th valign="middle" align="center">Dataset</th>
<th valign="middle" align="center">Edge deployability</th>
<th valign="middle" align="center">Key insight</th>
<th valign="middle" align="center">Reference</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">ViT-B16</td>
<td valign="middle" align="left">Vision Transformer</td>
<td valign="middle" align="left">~86</td>
<td valign="middle" align="left">83.3 (Acc), 90.1 (Prec), 90.75 (Rec), 89.5 (F1)</td>
<td valign="middle" align="left">Multispectral tomato dataset</td>
<td valign="middle" align="left">No (cloud-only)</td>
<td valign="middle" align="left">Very accurate but too heavy for real-time farm use</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B12">De Silva and Brown, 2023</xref></td>
</tr>
<tr>
<td valign="middle" align="left">PMVT</td>
<td valign="middle" align="left">MobileViT-inspired Transformer</td>
<td valign="middle" align="left">5.06</td>
<td valign="middle" align="left">94.9</td>
<td valign="middle" align="left">Plant disease dataset</td>
<td valign="middle" align="left">Yes</td>
<td valign="middle" align="left">Balances transformer power with mobile efficiency</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B40">Li G. et&#xa0;al., 2023</xref></td>
</tr>
<tr>
<td valign="middle" align="left">CoAtNet</td>
<td valign="middle" align="left">Hybrid CNN + Transformer</td>
<td valign="middle" align="left">Varies</td>
<td valign="middle" align="left">Not tomato-specific</td>
<td valign="middle" align="left">Generic datasets</td>
<td valign="middle" align="left">Partial</td>
<td valign="middle" align="left">Promising general model, needs crop-specific validation</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B11">Dai et&#xa0;al., 2021</xref></td>
</tr>
<tr>
<td valign="middle" align="left">Conformer</td>
<td valign="middle" align="left">Hybrid CNN + Transformer</td>
<td valign="middle" align="left">Varies</td>
<td valign="middle" align="left">Not tomato-specific</td>
<td valign="middle" align="left">Generic datasets</td>
<td valign="middle" align="left">Partial</td>
<td valign="middle" align="left">Good for mixed features, but untested on tomato datasets</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B50">Peng et&#xa0;al., 2021</xref></td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_1_5">
<label>3.1.5</label>
<title>Multimodal and multi-source edge AI for tomato disease detection</title>
<p>While most lightweight deep learning models for tomato disease diagnosis are based on RGB imagery, recent research is exploring multimodal and multi-source sensing to improve robustness under real-field conditions. Image-only systems face additional challenges such as variable illumination, partial occlusion, overlapping symptoms, and background clutter. In order to address these issues, emerging edge AI techniques combine visual information with spectral, physiological, and environmental signals, which enables earlier and more trusted disease detection.</p>
<p>Micro-near-infrared (micro NIR) sensors can now be combined with low-power controllers such as ESP32, as a result of the advancements in portable spectroscopy. This makes it possible for real-time reflectance measurements at wavelengths linked to water stress, chlorophyll degradation, and pathogen-induced biochemical changes. Research has demonstrated that NIR-RGB fusion can increase the accuracy of early disease detection by 5-12%, especially for fungal diseases, where pre-symptomatic physiological changes take place before the development of visible lesions (<xref ref-type="bibr" rid="B65">Zhang et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B49">Park et&#xa0;al., 2023</xref>). These low-cost NIR modules (&lt;10 mW power draw) can be embedded directly into edge-AI devices without significantly increasing latency.</p>
<p>In the same way, lightweight CNNs are being used in combination with chlorophyll fluorescence probes (Fv/Fm) to distinguish abiotic stress from bacterial or viral infection. For example, <xref ref-type="bibr" rid="B54">Ruiz-G&#xf3;mez et&#xa0;al. (2021)</xref> demonstrated that combining chlorophyll fluorescence with RGB features under field conditions enhanced TYLCV classification by increasing sensitivity to early chlorosis, which is sometimes challenging to detect visually.</p>
<p>Additionally, environmental sensors are also essential. It is found that the accuracy of disease prediction, for climate-dependent pathogen severity such as early blight and septoria, can be increased by combining soil moisture, canopy humidity, and temperature data with late-fusion neural architectures (<xref ref-type="bibr" rid="B24">Haque et&#xa0;al., 2022</xref>). These multimodal systems enhance model generalization across growth contexts and help in differentiating disease symptoms from temporary physiological stress.</p>
<p>Recent research explores lightweight multimodal fusion architectures that can operate on edge devices like Jetson Nano or ARM-based mobile processors, which goes beyond simple sensor integration. These include techniques such as late fusion of CNN features, MobileNet-based hybrid networks, and attention-based fusion layers optimized for embedded systems (<xref ref-type="bibr" rid="B36">Koirala et&#xa0;al., 2023</xref>). When compared to image-only models, these models can show greater resistance in situations like uneven lighting and occlusion, while maintaining inference time suitable for real-time field deployment.</p>
<p>All of these advancements suggest that multimodal sensing can act as a new and crucial area for tomato disease detection, that is both useful and deployable. Moreover, edge-AI systems can overcome a number of drawbacks of RGB-based methods by utilizing spectral, physiological, and environmental data. This allows for earlier and more reliable diagnoses for precision agriculture applications. Multimodal and multi-source edge AI approaches for tomato disease detection are summarized in <xref ref-type="table" rid="T4"><bold>Table&#xa0;4</bold></xref>, which includes the data modalities used, edge platforms, and their advantages over RGB-only models.</p>
<table-wrap id="T4" position="float">
<label>Table&#xa0;4</label>
<caption>
<p>Summary of multimodal and multi-source Edge AI approaches for tomato disease detection.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Data modalities</th>
<th valign="middle" align="center">Sensors/Inputs</th>
<th valign="middle" align="center">Edge platform</th>
<th valign="middle" align="center">Key advantage over RGB-only models</th>
<th valign="middle" align="center">Representative studies</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">RGB + Micro-NIR</td>
<td valign="middle" align="left">Micro-NIR reflectance, RGB images</td>
<td valign="middle" align="left">ESP32, Jetson Nano</td>
<td valign="middle" align="left">Improved early-stage disease detection; reduced sensitivity to lighting variation</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B65">Zhang et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B49">Park et&#xa0;al., 2023</xref></td>
</tr>
<tr>
<td valign="middle" align="left">RGB + Chlorophyll Fluorescence</td>
<td valign="middle" align="left">Fv/Fm ratio, RGB images</td>
<td valign="middle" align="left">Jetson Nano</td>
<td valign="middle" align="left">Better discrimination between biotic and abiotic stress; improved TYLCV detection</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B54">Ruiz-G&#xf3;mez et&#xa0;al., 2021</xref></td>
</tr>
<tr>
<td valign="middle" align="left">RGB + Environmental Data</td>
<td valign="middle" align="left">Soil moisture, temperature, humidity</td>
<td valign="middle" align="left">ARM-based edge devices</td>
<td valign="middle" align="left">Enhanced robustness across climates; reduced false positives</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B24">Haque et&#xa0;al., 2022</xref></td>
</tr>
<tr>
<td valign="middle" align="left">Multimodal Fusion (RGB + Spectral + Environmental)</td>
<td valign="middle" align="left">Combined visual, spectral, and microclimate data</td>
<td valign="middle" align="left">Jetson Nano</td>
<td valign="middle" align="left">Superior generalization under occlusion and uneven illumination</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B36">Koirala et&#xa0;al., 2023</xref></td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Edge AI deployment strategies for tomato disease diagnosis</title>
<p>Achieving higher accuracy is only one aspect of deep learning in agriculture and another important one is how easily these models can be used in environments with limited computing power. The majority of deep learning models are trained and assessed on high-performance GPUs or cloud platforms, but smallholder farmers in developing nations do not have access to such infrastructure. This has resulted in an increasing interest in edge-AI, where real-time inference is made possible by directly deploying on edge devices like drones, Raspberry Pi, smartphones or NVIDIA Jetson Nano, without Internet dependency.</p>
<p><italic>Why Edge AI?</italic></p>
<p>Edge AI enables disease diagnosis to occur on local devices with the following advantages:</p>
<list list-type="bullet">
<list-item>
<p>Low latency: Predictions are generated within milliseconds, supporting timely decision-making in field conditions (<xref ref-type="bibr" rid="B44">Mazzia et&#xa0;al., 2020</xref>).</p></list-item>
<list-item>
<p>Data privacy: Local processing of image data eliminates the need to send it to other servers. This lowers the risk of security and privacy issues associated with cloud-based data systems.</p></list-item>
<list-item>
<p>Offline functionality: No Internet connection is required, which is important for remote or rural farms.</p></list-item>
<list-item>
<p>Cost efficiency: Removes reliance on expensive cloud servers or network infrastructure.</p></list-item>
</list>
<p>Because of these advantages, edge deployment is an ideal solution for smart agricultural applications, especially in countries like India, where over 80% of farmers work on small landholdings with limited access to technology.</p>
<sec id="s3_2_1">
<label>3.2.1</label>
<title>Hardware platforms for field deployment</title>
<p>Deep learning has recently shifted from research labs to real-time agricultural conditions for tomato disease diagnosis. This change is largely influenced by edge-AI, which allows trained models to be installed directly on local hardware devices without the need for cloud-based inference. Raspberry Pi 4B, NVIDIA Jetson Nano and Android smartphones are the most common platforms used. Raspberry Pi 4B, a credit-card-sized microcomputer has a quad-core CPU and up to 8 GB of RAM, which balances both cost and performance. With the use of TensorFlow Lite, it easily supports lightweight models such as MobileNet, with inference speeds that are roughly 90% faster than the entire TensorFlow framework (<xref ref-type="bibr" rid="B66">Zuhair et&#xa0;al., 2023</xref>). Although it works well for simple tasks, its lack of GPU power may make it less effective for deeper models.</p>
<p>In contrast, real-time inference of moderate-sized CNNs such as ResNet and MobileNet is made possible by the NVIDIA Jetson Nano, which has a 128-core GPU and 4 GB of RAM. Because of this it is particularly useful in robotics and drone applications that need quick decisions and low latency (<xref ref-type="bibr" rid="B60">Tobiasz et&#xa0;al., 2023</xref>). Although it performs better, its increased cost and power consumption may be a drawback in farming areas with limited resources. Modern Android devices can run compressed deep learning models like MobileNetV2, with the help of frameworks like TensorFlow Lite or ONNX Runtime, especially those with adequate RAM and hardware acceleration (like mobile GPUs or NPUs). Depending on the model architecture and hardware optimization, these devices can achieve inference latencies of less than 500 milliseconds (<xref ref-type="bibr" rid="B41">Li Z. et&#xa0;al., 2023</xref>). However, in practical situations consistency may be impacted by hardware variability, thermal throttling and OS-level constraints. The features, capabilities and limitations are compiled in <xref ref-type="table" rid="T5"><bold>Table&#xa0;5</bold></xref> to help decide whether they are fit for different deployment situations.</p>
<table-wrap id="T5" position="float">
<label>Table&#xa0;5</label>
<caption>
<p>Comparison of edge AI hardware platforms for tomato disease detection.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Device</th>
<th valign="middle" align="center">Specs</th>
<th valign="middle" align="center">Supported models</th>
<th valign="middle" align="center">Pros</th>
<th valign="middle" align="center">Limitations</th>
<th valign="middle" align="center">Best use case</th>
<th valign="middle" align="center">Reference</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">Raspberry Pi 4B</td>
<td valign="middle" align="left">4&#x2013;8 GB RAM, Quad-core CPU</td>
<td valign="middle" align="left">MobileNet (via TensorFlow Lite)</td>
<td valign="middle" align="left">Low cost, widely available, open source; ~90% faster inference with TFLite</td>
<td valign="middle" align="left">Limited GPU, slower for deep models</td>
<td valign="middle" align="left">Low-cost farmer apps</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B66">Zuhair et&#xa0;al., 2023</xref></td>
</tr>
<tr>
<td valign="middle" align="left">NVIDIA Jetson Nano</td>
<td valign="middle" align="left">128-core GPU, 4 GB RAM</td>
<td valign="middle" align="left">ResNet, MobileNet</td>
<td valign="middle" align="left">Real-time inference, GPU support, suitable for robotics/drone use</td>
<td valign="middle" align="left">Higher cost, power-hungry</td>
<td valign="middle" align="left">Drones, robotics</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B60">Tobiasz et&#xa0;al., 2023</xref></td>
</tr>
<tr>
<td valign="middle" align="left">Android Smartphone</td>
<td valign="middle" align="left">4&#x2013;12 GB RAM, Octa-core CPU + GPU/NPU</td>
<td valign="middle" align="left">MobileNetV2 (TFLite, ONNX), SqueezeNet</td>
<td valign="middle" align="left">Portable, farmer-accessible, &lt;500 ms inference possible</td>
<td valign="middle" align="left">Thermal throttling, hardware variability, OS fragmentation</td>
<td valign="middle" align="left">Field-ready mobile apps</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B41">Li Z. et&#xa0;al., 2023</xref></td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_2_2">
<label>3.2.2</label>
<title>Model optimization techniques</title>
<p>Model optimization is necessary for running deep learning models efficiently on edge devices, which usually have constrained memory and processing power. The most widely used optimization strategies are pruning, quantization and knowledge distillation.</p>
<p>Deeper architectures can be run on edge devices like Jetson Nano due to pruning techniques, as it drastically decreases the model size without affecting performance. For example, on devices like Jetson Nano, the FuPruner technique has shown efficient model compression with minimum loss of accuracy (<xref ref-type="bibr" rid="B39">Li et&#xa0;al., 2020</xref>).</p>
<p>Quantization decreases the size of the model and speeds up inference by reducing numerical precision typically from 32-bit floating-point weights into 8-bit integer format. Quantized models offer significant benefits in speed and energy economy, even though they lose very little accuracy (<xref ref-type="bibr" rid="B30">Jacob et&#xa0;al., 2018</xref>). They are therefore perfect for the deployment of micro-controllers and smartphones.</p>
<p>Knowledge distillation improves deployment efficiency by training a lightweight &#x201c;student&#x201d; model to simulate the output behavior of a more accurate, heavier &#x201c;teacher&#x201d; model. According to <xref ref-type="bibr" rid="B27">Hinton et&#xa0;al. (2015)</xref>, this method can significantly reduce model complexity while maintaining a large portion of teacher&#x2019;s accuracy.</p>
<p>The main goal, benefits and examples of each optimization method along with their limitations are listed in <xref ref-type="table" rid="T6"><bold>Table&#xa0;6</bold></xref> to provide a comparative summary.</p>
<table-wrap id="T6" position="float">
<label>Table&#xa0;6</label>
<caption>
<p>Summary of model optimization techniques for edge AI.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Technique</th>
<th valign="middle" align="center">Purpose</th>
<th valign="middle" align="center">Benefits</th>
<th valign="middle" align="center">Limitation</th>
<th valign="middle" align="center">Example use</th>
<th valign="middle" align="center">Reference</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">Pruning</td>
<td valign="middle" align="left">Remove unimportant weights to compress the model</td>
<td valign="middle" align="left">Substantially reduces model size, preserves performance, enables deeper architectures on edge devices</td>
<td valign="middle" align="left">May slightly reduce accuracy</td>
<td valign="middle" align="left">FuPruner for compressed ResNet inference on Jetson Nano</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B39">Li et&#xa0;al., 2020</xref></td>
</tr>
<tr>
<td valign="middle" align="left">Quantization</td>
<td valign="middle" align="left">Reduce precision of weights (FP32 &#x2192; INT8)</td>
<td valign="middle" align="left">Faster inference, smaller memory usage, improved energy efficiency</td>
<td valign="middle" align="left">Needs post-training tuning, may cause minimal accuracy loss</td>
<td valign="middle" align="left">MobileNet quantized for Android</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B30">Jacob et&#xa0;al., 2018</xref></td>
</tr>
<tr>
<td valign="middle" align="left">Knowledge Distillation</td>
<td valign="middle" align="left">Train a smaller &#x201c;student&#x201d; model from a large, accurate &#x201c;teacher&#x201d; model</td>
<td valign="middle" align="left">Retains much of teacher&#x2019;s accuracy while reducing complexity</td>
<td valign="middle" align="left">Requires a high-performing teacher model</td>
<td valign="middle" align="left">MobileNetV2 distilled from ResNet</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B27">Hinton et&#xa0;al., 2015</xref></td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_2_3">
<label>3.2.3</label>
<title>On-field deployment challenges</title>
<p>Edge AI has a lot of potential for smart agriculture, but there are numerous challenges to overcome before it can be used in real life settings. One major problem is dataset shift, which occurs when models trained under ideal lab conditions (e.g., PlantVillage) are unable to generalize to field conditions because of variations in lighting, background clutter or disease stage (<xref ref-type="bibr" rid="B34">Kamilaris and Prenafeta-Bold&#xfa;, 2018</xref>).</p>
<p>Inference instability caused by low-quality images such as incomplete, occluded or blurred images can reduce accuracy, which is another major problem. Moreover, continuous outdoor usage of devices like the Raspberry Pi during hot weather might affect their performance due to battery limitations.</p>
<p>In order to address these issues, researchers are developing automated retraining processes, adaptive confidence thresholds and real-time image pre-processing that enables the model to be updated based on fresh field data. These methods are expected to be helpful in improving AI-based field diagnosis systems.</p>
<p><xref ref-type="fig" rid="f5"><bold>Figure&#xa0;5</bold></xref> shows a simplified flow that illustrates the normal inference process on an edge AI device.</p>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Edge AI process for detecting tomato plant diseases.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1737208-g005.tif">
<alt-text content-type="machine-generated">Flowchart illustration showing a drone capturing crop images (image acquisition), data preprocessing with gears and database icon, processing on an edge device like Raspberry Pi, followed by deep learning model inference for disease prediction, and output interfaces such as mobile apps and alerts.</alt-text>
</graphic></fig>
</sec>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Microbial biocontrol in tomato disease management</title>
<p>In addition to biotic stresses in tomato plants such as bacterial, viral and fungal infections, pathogen resistance to chemical pesticides is also increasing, posing environmental hazards. As a result of this concern, microbial biocontrol, an environment-friendly alternative used to suppress or outcompete plant pathogens and insect vectors, has gained attention.</p>
<sec id="s3_3_1">
<label>3.3.1</label>
<title>Mechanisms of action</title>
<p>Plant pathogens are suppressed by microbial biocontrol agents (MBAs) by using a variety of mechanisms such as:</p>
<list list-type="bullet">
<list-item>
<p>Antibiosis: The synthesis of antimicrobial substances that either directly destroy or suppress pathogens.</p></list-item>
<list-item>
<p>Mycoparasitism: The direct attack or destruction of pathogen hyphae and other structures.</p></list-item>
<list-item>
<p>Induced Systemic Resistance, ISR (defensive response produced by beneficial microbes): Activation of plant immune responses against a broad range of pathogens.</p></list-item>
<list-item>
<p>Competition for nutrients and space: Outcompeting harmful organisms within the rhizosphere or phyllosphere.</p></list-item>
<list-item>
<p>Vector suppression: Certain microbes such as <italic>Beauveria bassiana</italic> target insect vectors responsible for transmitting viral pathogens.</p></list-item>
</list>
<p>Together these processes can produce a protective microbiome inside and around the tomato plant (<xref ref-type="bibr" rid="B25">Harman et&#xa0;al., 2004</xref>). The microbial biocontrol agents that are commonly used in tomato disease management with different modes of action are <italic>Trichoderma</italic>, <italic>Bacillus</italic>, <italic>Pseudomonas</italic>, <italic>Beauveria</italic>, and <italic>Metarhizium</italic>. For example, <italic>Bacillus subtilis</italic> competes for ecological niches by producing antibiotics whereas <italic>Trichoderma harzianum</italic> is well-known for its myco-parasitic activity and ability to trigger plant immune responses. The target pathogens and their representative biocontrol agents are shown in <xref ref-type="table" rid="T7"><bold>Table&#xa0;7</bold></xref>.</p>
<table-wrap id="T7" position="float">
<label>Table&#xa0;7</label>
<caption>
<p>Common microbial biocontrol agents used against pathogens in tomato plant.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Microbial agent</th>
<th valign="middle" align="center">Target pathogens/Insects</th>
<th valign="middle" align="center">Mechanism(s)</th>
<th valign="middle" align="center">Field suitability</th>
<th valign="middle" align="center">Key insight</th>
<th valign="middle" align="center">Reference</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left"><italic>Trichoderma harzianum</italic></td>
<td valign="middle" align="left"><italic>Fusarium</italic>, <italic>Alternaria</italic>, <italic>Phytophthora</italic></td>
<td valign="middle" align="left">Mycoparasitism, ISR, competition</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Versatile, widely used, induces resistance</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B64">Yao et&#xa0;al., 2023</xref></td>
</tr>
<tr>
<td valign="middle" align="left"><italic>Bacillus subtilis</italic></td>
<td valign="middle" align="left"><italic>Xanthomonas</italic>, <italic>Pseudomonas</italic>, fungal pathogens</td>
<td valign="middle" align="left">Antibiosis, competition, ISR</td>
<td valign="middle" align="left">High</td>
<td valign="middle" align="left">Effective as foliar spray, enhances photosynthesis</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B10">Chowdhury et&#xa0;al., 2015</xref></td>
</tr>
<tr>
<td valign="middle" align="left"><italic>Pseudomonas fluorescens</italic></td>
<td valign="middle" align="left">Soilborne fungi and bacterial wilt agents</td>
<td valign="middle" align="left">ISR, antifungal compound production</td>
<td valign="middle" align="left">Medium</td>
<td valign="middle" align="left">Works better in root-zone treatments</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B31">Jia et&#xa0;al., 2023</xref></td>
</tr>
<tr>
<td valign="middle" align="left"><italic>Beauveria bassiana</italic></td>
<td valign="middle" align="left">Whiteflies, thrips (vectors of TYLCV, TSWV)</td>
<td valign="middle" align="left">Entomopathogenic action</td>
<td valign="middle" align="left">Medium</td>
<td valign="middle" align="left">Useful against insect vectors like TYLCV carriers</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B37">Lacey et&#xa0;al., 2015</xref></td>
</tr>
<tr>
<td valign="middle" align="left"><italic>Metarhizium anisopliae</italic></td>
<td valign="middle" align="left">Root-knot nematodes, soil insects</td>
<td valign="middle" align="left">Parasitism, colonization of insect cuticle</td>
<td valign="middle" align="left">Medium</td>
<td valign="middle" align="left">Niche role in soil-borne pest control</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B25">Harman et&#xa0;al., 2004</xref></td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_3_2">
<label>3.3.2</label>
<title>Field applications and efficacy</title>
<p>Microbial biocontrol agents (MBAs) have been shown in multiple field studies to be extremely successful in managing a variety of plant diseases. For instance, <italic>Trichoderma</italic> species are mostly recognized for their role in the biological control of plant fungal and nematode diseases through mechanisms like mycoparasitism, competition, and induced systemic resistance (<xref ref-type="bibr" rid="B64">Yao et&#xa0;al., 2023</xref>). In the same way, foliar applications of <italic>Bacillus subtilis</italic> have demonstrated efficacy against bacterial spot (<italic>Xanthomonas</italic> spp.) and early blight (<italic>Alternaria solani</italic>), with treated plants having increased photosynthetic activity, improved leaf retention and enhanced fruit development (<xref ref-type="bibr" rid="B10">Chowdhury et&#xa0;al., 2015</xref>).</p>
<p>Apart from single-strain applications, microbial consortia with the combinations of two or more suitable biocontrol organisms are becoming more and more popular due to their synergy. By focusing on several infections or combining anti-fungal and insecticidal qualities, these formulations frequently offer broad-spectrum protection. For example, studies have demonstrated the synergistic antagonistic activity of a <italic>Bacillus</italic>-<italic>Pseudomonas</italic> consortium, leading to improved control of the fungal pathogen <italic>Alternaria solani</italic> (<xref ref-type="bibr" rid="B31">Jia et&#xa0;al., 2023</xref>). Depending on the pathogen&#x2019;s infection pathway, these consortia are formulated as foliar sprays, soil drenches, root dips and seed coatings.</p>
<p>Additionally, field-based microbial trials are becoming more focused on evaluating the formulation&#x2019;s stability, climate resilience and their effectiveness in different temperature and humidity levels. In order to improve soil health and plant resistance against diseases, recent research has investigated the use of organic amendments such as fermented, composted or fresh organic treatments in arable soils (<xref ref-type="bibr" rid="B62">van der Sloot et&#xa0;al., 2024</xref>).</p>
<p>Despite encouraging outcomes, soil microbiota, pH and crop type, all of these have a significant impact on field success. So, for scalability and adoption, region-specific strain selection and formulation is important. The suggested microbial agents, their doses, application methods and environmental factors are listed in <xref ref-type="table" rid="T8"><bold>Table&#xa0;8</bold></xref>.</p>
<table-wrap id="T8" position="float">
<label>Table&#xa0;8</label>
<caption>
<p>Multidimensional microbial recommendation matrix for tomato disease management under AI-guided decision support.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Disease/Pest pressure</th>
<th valign="middle" align="center">Recommended microbial agent(s)</th>
<th valign="middle" align="center">Dosage &amp; formulation</th>
<th valign="middle" align="center">Application method</th>
<th valign="middle" align="center">Optimal growth stage</th>
<th valign="middle" align="center">Environmental conditions (Temp, RH)</th>
<th valign="middle" align="center">Reference</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">Tomato Yellow Leaf Curl Virus (TYLCV) (vector: <italic>Bemisia tabaci</italic>)</td>
<td valign="middle" align="left"><italic>Beauveria bassiana</italic>, <italic>Metarhizium anisopliae</italic></td>
<td valign="middle" align="left">1&#xd7;10<sup>8</sup>&#x2013;1&#xd7;10<sup>9</sup> conidia/mL</td>
<td valign="middle" align="left">Foliar spray targeting whiteflies</td>
<td valign="middle" align="left">Vegetative, early flowering</td>
<td valign="middle" align="left">Effective at 25&#x2013;32 &#xb0;C; RH &gt; 60% enhances sporulation</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B15">Fargues et&#xa0;al., 2012</xref>; <xref ref-type="bibr" rid="B63">Wraight et&#xa0;al., 2020</xref></td>
</tr>
<tr>
<td valign="middle" align="left">Early blight (<italic>Alternaria solani</italic>)</td>
<td valign="middle" align="left"><italic>Trichoderma harzianum</italic>, <italic>Bacillus subtilis</italic></td>
<td valign="middle" align="left">1&#x2013;2 g/L WP or 1&#xd7;10<sup>8</sup> CFU/mL</td>
<td valign="middle" align="left">Foliar spray + root drench</td>
<td valign="middle" align="left">Seedling to vegetative</td>
<td valign="middle" align="left">Best at 20&#x2013;28 &#xb0;C; moderate RH</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B64">Yao et&#xa0;al., 2023</xref></td>
</tr>
<tr>
<td valign="middle" align="left">Late blight (<italic>Phytophthora infestans</italic>)</td>
<td valign="middle" align="left"><italic>Trichoderma asperellum</italic></td>
<td valign="middle" align="left">2&#x2013;3 g/L WP</td>
<td valign="middle" align="left">Preventive foliar application</td>
<td valign="middle" align="left">Pre-flowering</td>
<td valign="middle" align="left">Avoid applications during rainfall; RH 60&#x2013;80%</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B6">Ben&#xed;tez et&#xa0;al., 2004</xref></td>
</tr>
<tr>
<td valign="middle" align="left">Bacterial wilt (<italic>Ralstonia solanacearum</italic>)</td>
<td valign="middle" align="left"><italic>Pseudomonas fluorescens</italic>, <italic>Bacillus amyloliquefaciens</italic></td>
<td valign="middle" align="left">1&#xd7;10<sup>8</sup> CFU/mL soil drench</td>
<td valign="middle" align="left">Soil drench near root zone</td>
<td valign="middle" align="left">Transplanting + early vegetative</td>
<td valign="middle" align="left">Effective at 25&#x2013;30 &#xb0;C; moist but not waterlogged soils</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B32">Jiao et&#xa0;al., 2021</xref></td>
</tr>
<tr>
<td valign="middle" align="left">Root-knot nematodes (<italic>Meloidogyne</italic> spp.)</td>
<td valign="middle" align="left"><italic>Paecilomyces lilacinus</italic>, <italic>Purpureocillium lilacinum</italic></td>
<td valign="middle" align="left">2&#xd7;10<sup>8</sup> CFU/g granules</td>
<td valign="middle" align="left">Soil incorporation around root zone</td>
<td valign="middle" align="left">Seedling stage</td>
<td valign="middle" align="left">22&#x2013;30 &#xb0;C; sandy loam soils preferred</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B35">Khan et&#xa0;al., 2019</xref></td>
</tr>
<tr>
<td valign="middle" align="left">Whitefly population surge (non-viral)</td>
<td valign="middle" align="left"><italic>Beauveria bassiana</italic></td>
<td valign="middle" align="left">1&#xd7;10<sup>8</sup> conidia/mL</td>
<td valign="middle" align="left">Direct foliar spray on vector hotspots</td>
<td valign="middle" align="left">Any stage</td>
<td valign="middle" align="left">RH &gt; 65% improves infection</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B63">Wraight et&#xa0;al., 2020</xref></td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_3_3">
<label>3.3.3</label>
<title>Controversies and challenges in microbial control</title>
<p>Microbial Biocontrol Agents (MBAs) are becoming more popular as environmentally friendly substitutes for chemical pesticides. But their efficacy in the field is still variable and sometimes inconsistent among research studies. These inconsistencies highlight several important challenges that must be addressed before microbial control can be consistently incorporated into precision disease management systems.</p>
<p>The foremost problem in the usage of microbial biocontrol is that its efficacy is sensitive to microclimatic factors. For example, <italic>Bacillus subtilis</italic> is the most commonly used foliar biocontrol agent, but during high humidity its antagonistic activity gets reduced. This is due to the inhibition of sporulation by excessive moisture, which also reduces persistence on leaves. In the same way, when <italic>Trichoderma</italic> species are applied in alkaline or low-organic matter soils, their efficiency is reduced. This shows that a suitable environment is also essential for microbial biocontrol (<xref ref-type="bibr" rid="B25">Harman et&#xa0;al., 2004</xref>; <xref ref-type="bibr" rid="B64">Yao et&#xa0;al., 2023</xref>).</p>
<p>The next underlying problem is the strain adoption and colonization mechanism of the microorganisms. Because most of the strains used for developing microbial consortia are grown in controlled environments like laboratory setups and tested in greenhouses, they fail to address the competition and stress conditions in agricultural fields. Field studies have demonstrated that microbial colonization is strongly dependent on soil microbiome composition, pH, temperature fluctuations and nutrient availability, which leads to uneven disease control outcomes across regions (<xref ref-type="bibr" rid="B32">Jiao et&#xa0;al., 2021</xref>).</p>
<p>Another important conflict is the chemical incompatibility between MBAs and certain classes of fungicides, insecticides and bactericides. For example, the viability and metabolic activity of <italic>Bacillus</italic> and <italic>Trichoderma</italic> are reduced by the combined usage of copper-based bactericides and broad-spectrum fungicides (<xref ref-type="bibr" rid="B6">Ben&#xed;tez et&#xa0;al., 2004</xref>). These interactions may unintentionally interfere with beneficial microbial colonization or decrease the biocontrol efficiency.</p>
<p>Further, the majority of MBAs have a short duration of effectiveness, which means they work best when they are used before or at the very early stages of pathogen infection. If the application of MBAs is too late, then their efficacy can be greatly reduced, mostly affecting smallholder farming systems, because disease recognition often occurs late. This is common in quickly spreading diseases such as TYLCV or early blight. This limitation suggests the need for timely and precise application, which may be made possible by AI-based early detection systems.</p>
<p>Finally, one of the most frequent constraints is the uneven field performance of the microbial consortia.</p>
<p>This unevenness is because of the complex interactions among environmental factors, crop genotype, native soil biota and agricultural management practices. These complications show the importance of choosing region-specific strains, improving formulation stability and integrating digital decision support systems to increase acceptance and reliability.</p>
<p>All of these difficulties show that although microbial biocontrol has a lot of potential, its practical application needs to be handled while considering all the external factors such as biological interactions and environmental conditions. Understanding all these factors is crucial for developing strong and integrated control strategies that combine the advantages of AI-based diagnostics with microbial biocontrol strategies.</p>
</sec>
<sec id="s3_3_4">
<label>3.3.4</label>
<title>Integration with AI-based early detection</title>
<p>Microbial biocontrol works best when it is applied at the right time. Mostly, biocontrol agents have to be used at the earlier stages of infection or pathogen colonization, as they are preventive rather than curative. This makes them suitable for integration with AI-powered disease detection systems, particularly those capable of real-time, in-field diagnosis.</p>
<p>As discussed in Section 3, lightweight deep learning models such as MobileNet or ViT-lite can be deployed on edge devices (e.g., smartphones, Raspberry Pi, Jetson Nano) to detect early visual symptoms of foliar diseases like TYLCV or early blight. After detection, the system will suggest the suitable microbial agent, the application mode and its dosage. For example, if AI detects TYLCV symptoms in tomato then it will suggest the use of <italic>Beauveria bassiana</italic> to reduce whitefly populations before they reach outbreak levels.</p>
<p>According to the recent study by <xref ref-type="bibr" rid="B53">Rejeb et&#xa0;al. (2022)</xref>, drones are being increasingly used in agriculture from monitoring crops to delivery of biocontrols due to their ability to provide focused treatments and more efficient resource management. This reduces the environmental impact and also minimizes the input cost for farmers. Moreover, geotagging of disease instances can help schedule repeated microbial applications or automatically create maps for agricultural extension services.</p>
<p>In order to provide a hybrid strategy for predicting disease outbreaks and optimizing the timing of microbial interventions, some advanced platforms are linking AI detection models with IoT-based soil and weather sensors. This level of linking is needed for closed-loop precision disease management, where AI not only diagnoses but also mitigates and monitors environment friendly treatment strategies.</p>
</sec>
</sec>
<sec id="s3_4">
<label>3.4</label>
<title>Proposed framework</title>
<p>An integration framework that combines lightweight AI-based disease detection with IoT-enabled sensing and microbial biocontrol strategies has been developed to address the gap between early disease diagnosis and actionable field management (<xref ref-type="fig" rid="f6"><bold>Figure&#xa0;6</bold></xref>). This framework uses customized lightweight deep learning models like MobileNet and EfficientNet to process the tomato leaf images that are captured using mobile phones or drones and are processed on edge devices (e.g., smartphones, Raspberry Pi, Jetson Nano). After analysis, the output of the model will be the type and severity of disease, which are then combined with microclimate and vector-pressure data obtained from IoT sensors (IoT sensor integration module). This includes temperature&#x2013;humidity measurements, soil moisture levels, leaf wetness status and whitefly trap counts.</p>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>Proposed framework for edge AI&#x2013;based tomato disease detection with microbial biocontrol strategies.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1737208-g006.tif">
<alt-text content-type="machine-generated">Flowchart diagram illustrating a plant disease detection and recommendation system using AI and IoT sensors, showing input from mobile-captured images, AI model prediction, integration of environmental data from sensors, feedback loops, and recommendations for treatment modes.</alt-text>
</graphic></fig>
<p>Next comes the Recommendation Engine, which uses these AI-derived and IoT-derived parameters as inputs. This engine maps the identified disease to the proper microbial agents (e.g., <italic>Bacillus subtilis</italic> for bacterial spot, <italic>Beauveria bassiana</italic> for whitefly vectors, <italic>Trichoderma harzianum</italic> for early blight) and recommends the suitable application modes such as seed treatments, foliar sprays or soil drenches. In addition, recommendation engine also takes into account the environmental conditions and crop growth stages to adjust dosage and timing for maximizing the microbial efficacy under field conditions.</p>
<p>The IoT sensor integration module is connected to the AI detection stage through a feedback loop, which allows the system to continuously modify and enhance model performance as new field data is regularly updated. This closed loop framework (AI-IoT-Microbial biocontrol) helps farmers to take timely decisions before the disease outbreaks in a sustainable way. To the best of our understanding, no previous work has proposed an integrated architecture that directly connects edge AI&#x2013;based tomato disease detection with microbial biocontrol recommendations in a real-time decision-support system.</p>
</sec>
</sec>
<sec id="s4" sec-type="results">
<label>4</label>
<title>Results and discussion</title>
<sec id="s4_1">
<label>4.1</label>
<title>Dataset</title>
<p>All experiments were carried out in Google Colab with Python 3.12, TensorFlow 2.17, and PyTorch 2.4 frameworks. The Colab environment&#x2019;s NVIDIA T4 GPU was used for model training. Throughout the experiment, a fixed random seed (42) was applied for reproducibility. Using the Python tool kagglehub, the PlantVillage tomato leaf dataset was downloaded from Kaggle (<xref ref-type="bibr" rid="B8">Chaudhry, 2021</xref>). The subset containing 7,373 tomato leaf images was selected with an uneven distribution across four disease classes such as <italic>Tomato Early blight</italic> (800 images), <italic>Tomato Late blight</italic> (1,526 images), <italic>Tomato Leaf Mold</italic> (761 images), and <italic>Tomato Yellow Leaf Curl Virus</italic> (4,286 images). The dataset was split into 70% training, 15% validation, and 15% testing with the same fixed random seed (42) for consistency. Following the 70&#x2013;15&#x2013;15 split, the training, validation, and testing sets contained 5,160, 1,106, and 1,107 images respectively. <xref ref-type="table" rid="T9"><bold>Table&#xa0;9</bold></xref> presents the class-wise image counts for each split.</p>
<table-wrap id="T9" position="float">
<label>Table&#xa0;9</label>
<caption>
<p>Distribution of images in the curated PlantVillage tomato subset across training, validation, and testing sets.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Class</th>
<th valign="middle" align="center">Total images</th>
<th valign="middle" align="center">Training</th>
<th valign="middle" align="center">Validation</th>
<th valign="middle" align="center">Testing</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">Tomato Early blight</td>
<td valign="middle" align="center">800</td>
<td valign="middle" align="center">560</td>
<td valign="middle" align="center">120</td>
<td valign="middle" align="center">120</td>
</tr>
<tr>
<td valign="middle" align="left">Tomato Late blight</td>
<td valign="middle" align="center">1,526</td>
<td valign="middle" align="center">1,068</td>
<td valign="middle" align="center">229</td>
<td valign="middle" align="center">229</td>
</tr>
<tr>
<td valign="middle" align="left">Tomato Leaf Mold</td>
<td valign="middle" align="center">761</td>
<td valign="middle" align="center">532</td>
<td valign="middle" align="center">114</td>
<td valign="middle" align="center">115</td>
</tr>
<tr>
<td valign="middle" align="left">Tomato Yellow Leaf Curl Virus</td>
<td valign="middle" align="center">4,286</td>
<td valign="middle" align="center">3,000</td>
<td valign="middle" align="center">643</td>
<td valign="middle" align="center">643</td>
</tr>
<tr>
<td valign="middle" align="left">Total</td>
<td valign="middle" align="center">7,373</td>
<td valign="middle" align="center">5,160</td>
<td valign="middle" align="center">1,106</td>
<td valign="middle" align="center">1,107</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>Model performance</title>
<p>For experimental validation, two lightweight convolutional neural network architectures MobileNetV2 and EfficientNet-B0 were trained with a curated subset of the PlantVillage Tomato Leaf dataset. The dataset comprised four major tomato diseases prevalent in Tamil Nadu: <italic>Early Blight, Late Blight, Leaf Mold</italic>, and <italic>Tomato Yellow Leaf Curl Virus (TYLCV).</italic> All images were resized to 224 &#xd7; 224 pixels and preprocessed according to the ImageNet normalization scheme specific to each model. Both architectures were initialized with ImageNet-pretrained weights using the <italic>torchvision.models</italic> API and fine-tuned for 10 epochs with the Adam optimizer (learning rate = 1 &#xd7; 10<sup>&#x2212;4</sup>) and categorical cross-entropy loss. Data augmentation involved random horizontal flips and rotations up to 15&#xb0;, while class weights were applied to reduce the influence of mild dataset imbalance. A batch size of 32 was used for training each model.</p>
<p>Results demonstrated that both models achieved exceptional classification accuracy on the test dataset. MobileNetV2 achieved an overall accuracy of approximately 99.9% with a macro-average F1-score of 0.99, while EfficientNet-B0 achieved a similar overall accuracy of 99.9% with a macro-average F1-score of 0.99. Although both models have the same accuracy, they differ in the number of parameters: MobileNetV2 required 2.26 million parameters, whereas EfficientNet-B0 required 4.05 million, confirming the superior computational efficiency of MobileNetV2 and its suitability for real-time, resource-constrained deployment. The classification results of MobileNetV2 and EfficientNet-B0 are given in <xref ref-type="table" rid="T10"><bold>Table&#xa0;10</bold></xref>. Both models achieved a high accuracy of 99.9%, with MobileNetV2 showing slightly higher recall for Tomato Leaf Mold and better stability across classes.</p>
<table-wrap id="T10" position="float">
<label>Table&#xa0;10</label>
<caption>
<p>Performance comparison between MobileNetV2 and EfficientNet-B0 on the PlantVillage tomato leaf test dataset.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Model</th>
<th valign="middle" align="center">Accuracy (%)</th>
<th valign="middle" align="center">Macro avg precision</th>
<th valign="middle" align="center">Macro avg recall</th>
<th valign="middle" align="center">Macro Avg F1-Score</th>
<th valign="middle" align="center">Epochs</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">MobileNetV2</td>
<td valign="middle" align="center">100.0</td>
<td valign="middle" align="center">0.99</td>
<td valign="middle" align="center">0.99</td>
<td valign="middle" align="center">0.99</td>
<td valign="middle" align="center">10</td>
</tr>
<tr>
<td valign="middle" align="left">EfficientNet-B0</td>
<td valign="middle" align="center">100.0</td>
<td valign="middle" align="center">0.99</td>
<td valign="middle" align="center">0.99</td>
<td valign="middle" align="center">0.99</td>
<td valign="middle" align="center">10</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The training and validation accuracy and loss curves of both models are shown in <xref ref-type="fig" rid="f7"><bold>Figure&#xa0;7</bold></xref>, demonstrating little overfitting and smooth convergence within 10 epochs. <xref ref-type="fig" rid="f8"><bold>Figure&#xa0;8</bold></xref> shows the confusion matrices for each model, which verify consistent precision in every category.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p><bold>(A)</bold> Training and validation accuracy and loss curves of MobileNetV2 showing smooth convergence. <bold>(B)</bold> Training and validation accuracy and loss curves of EfficientNet-B0 showing smooth convergence.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1737208-g007.tif">
<alt-text content-type="machine-generated">Four line charts compare MobileNetV2 and EfficientNet-B0 training over ten epochs. Top row shows MobileNetV2 loss and accuracy, with loss decreasing and accuracy increasing. Bottom row shows EfficientNet-B0 loss and accuracy, with similar trends displayed.</alt-text>
</graphic></fig>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p><bold>(A)</bold> Confusion matrix for MobileNetV2 showing perfect classification of all four tomato disease classes. <bold>(B)</bold> Confusion matrix for EfficientNet-B0 showing perfect classification of all four tomato disease classes.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1737208-g008.tif">
<alt-text content-type="machine-generated">Two side-by-side confusion matrix heatmaps comparing actual versus predicted values for four tomato diseases. Figure A shows slight off-diagonal misclassifications, while Figure B displays fewer misclassifications. Both use a blue color scale to indicate cell value intensity.</alt-text>
</graphic></fig>
<p>Both models showed inference latencies of less than 100 milliseconds per image, indicating their suitability for tomato disease detection using edge-AI. From the findings it is confirmed that the lightweight models, specifically MobileNetV2 can achieve higher accuracy while retaining efficiency and generalization. This makes them appropriate for mobile and embedded agricultural systems.</p>
</sec>
<sec id="s4_3">
<label>4.3</label>
<title>Comparison with previous research studies</title>
<p>The outcome of this investigation aligns well with earlier studies on tomato leaf disease classification. Previous works by <xref ref-type="bibr" rid="B16">Ferentinos (2018)</xref>; <xref ref-type="bibr" rid="B3">Ahmed et&#xa0;al. (2022)</xref>, and <xref ref-type="bibr" rid="B40">Li G et al. (2023)</xref> have shown classification accuracies between 95% and 99% on the full PlantVillage dataset using deeper models such as VGG, GoogLeNet, and EfficientNet variants. In contrast, the present study achieves a comparable accuracy using a smaller sub-dataset, fewer training epochs and fewer parameters. <xref ref-type="table" rid="T11"><bold>Table&#xa0;11</bold></xref> lists the comparative performance of both models used in the current study along with previous relevant studies. The class-wise precision and recall results show consistent performance across all four disease categories with only slight differences between <italic>early blight</italic> and <italic>leaf mold</italic>. This shows that transfer learning along with lightweight CNNs can efficiently match the performance of more complicated architectures while also maintaining computational efficiency.</p>
<table-wrap id="T11" position="float">
<label>Table&#xa0;11</label>
<caption>
<p>Performance comparison between the present study and earlier existing works.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Author/Model</th>
<th valign="middle" align="center">Dataset</th>
<th valign="middle" align="center">Classes</th>
<th valign="middle" align="center">Model params</th>
<th valign="middle" align="center">Accuracy (%)</th>
<th valign="middle" align="center">Notes</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B16">Ferentinos (2018)</xref></td>
<td valign="middle" align="left">PlantVillage (25 crops, 58 diseases)</td>
<td valign="middle" align="left">58</td>
<td valign="middle" align="left">AlexNet, GoogLeNet, VGG</td>
<td valign="middle" align="center">99.5</td>
<td valign="middle" align="left">Heavy models</td>
</tr>
<tr>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B3">Ahmed et&#xa0;al. (2022)</xref></td>
<td valign="middle" align="left">PlantVillage (Tomato)</td>
<td valign="middle" align="left">10</td>
<td valign="middle" align="left">MobileNetV2 (~2.3M)</td>
<td valign="middle" align="center">95.6</td>
<td valign="middle" align="left">Used full tomato dataset</td>
</tr>
<tr>
<td valign="middle" align="left">This Work (2025)</td>
<td valign="middle" align="left">PlantVillage (Tomato subset)</td>
<td valign="middle" align="left">4 (Early Blight, Late Blight, Leaf Mold, TYLCV)</td>
<td valign="middle" align="left">MobileNetV2 (2.26M)</td>
<td valign="middle" align="center">99.9</td>
<td valign="middle" align="left">Fast inference (100 ms), edge-deployable</td>
</tr>
<tr>
<td valign="middle" align="left">This Work (2025)</td>
<td valign="middle" align="left">PlantVillage (Tomato subset)</td>
<td valign="middle" align="left">4 (Early Blight, Late Blight, Leaf Mold, TYLCV)</td>
<td valign="middle" align="left">EfficientNetB0 (4.05M)</td>
<td valign="middle" align="center">99.9</td>
<td valign="middle" align="left">Underperformed on small, imbalanced dataset</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Unlike large-scale research that depends mostly on generic datasets, the present work highlights region-specific validation by focusing only on tomato diseases commonly observed in Tamil Nadu. The AI model&#x2019;s practical applicability to real-time agricultural situations is improved by this targeted examination. From the above results, it is understood that transparency, adaptability, and model efficiency are equally important for the successful deployment of AI-based disease detection systems in field conditions, particularly in developing regions where computational resources are limited.</p>
</sec>
<sec id="s4_4">
<label>4.4</label>
<title>Field conditions and expected performance degradation in lightweight models</title>
<p>The strong basis for evaluating lightweight deep learning models was made possible by standard datasets like PlantVillage, which also allowed for controlled model comparison. Recent studies, including the current work, use advanced data augmentation techniques such as illumination shifts, contrast variation, partial occlusion, and background noise that simulate real field variability. This results in reduced model over-fitting. When moving on to field deployment, these methods have been demonstrated to greatly enhance model generalization (<xref ref-type="bibr" rid="B16">Ferentinos, 2018</xref>; <xref ref-type="bibr" rid="B61">Too et&#xa0;al., 2019</xref>).</p>
<p>However, controlled datasets do not properly reflect the complexities of practical agricultural environments. Images obtained in real fields frequently exhibit heterogeneous backgrounds, variable viewing angles, overlapping foliage, dew accumulation, and early-stage symptoms. According to studies by <xref ref-type="bibr" rid="B61">Too et al. (2019)</xref> and <xref ref-type="bibr" rid="B1">Agarwal et&#xa0;al. (2023)</xref>, lightweight convolutional architectures may show decreased accuracy when directly assessed on unstructured field imagery, with performance degradation triggered more by environmental noise than by model capacity.</p>
<p>Across the literature, the primary contributors to performance variation under field conditions include:</p>
<list list-type="roman-lower">
<list-item>
<p>Background complexity: soil, stems and debris introduce features similar to disease lesions;</p></list-item>
<list-item>
<p>Occlusion and variable leaf geometry: Limits the visible symptomatic region;</p></list-item>
<list-item>
<p>Early-stage disease expression: hard to differentiate from abiotic stress; and</p></list-item>
<list-item>
<p>Imaging device heterogeneity: Includes differences in camera sensors and color calibration.</p></list-item>
</list>
<p>In order to overcome these challenges, lightweight pre-processing strategies compatible with edge deployment are suggested by numerous studies. Mobile-UNet and Fast-SCNN are examples of background segmentation or leaf extraction modules that help to decrease background interference and isolate the regions of interest before classification (<xref ref-type="bibr" rid="B1">Agarwal et&#xa0;al., 2023</xref>). Likewise, <xref ref-type="bibr" rid="B36">Koirala et&#xa0;al. (2023)</xref> demonstrated that certain augmentation techniques like brightness variation, random occlusion masking, and shadow modelling can enhance robustness without computational overhead. Further, predictions in complicated field conditions and uneven lighting can be stabilized by multimodal fusion techniques that combine spectral and environmental data with RGB images (<xref ref-type="bibr" rid="B54">Ruiz-G&#xf3;mez et&#xa0;al., 2021</xref>).</p>
<p>All these findings reveal that lightweight models like MobileNetV2 and EfficientNet-B0, when combined with suitable augmentation and preprocessing techniques, are well suited for edge-AI deployment. In order to enhance the shift from controlled benchmarks to operational agricultural systems, future research should focus on verifying these methods using real-time field datasets across different agro-climatic regions.</p>
</sec>
<sec id="s4_5">
<label>4.5</label>
<title>Implications for sustainable digital agriculture</title>
<p>The results of this experimental validation show that combining lightweight deep learning models with sustainable disease management is practically feasible. The strong efficiency and accuracy of MobileNetV2 and EfficientNet-B0 show the potential of edge-AI systems for offline and real-time disease detection, even in remote agricultural areas. These models can offer data-driven, environment-friendly methods of managing pests and diseases when combined with microbial biocontrol strategies and decision-support systems.</p>
<p>Overall, this present work shows that lightweight CNN models based on transfer learning can attain superior performance while continuing to be deployable and interpretable on low power devices. By incorporating these AI-based disease diagnostic tools into precision agriculture, more scalable and resilient food production systems may develop.</p>
</sec>
</sec>
<sec id="s5">
<label>5</label>
<title>Challenges and future prospects</title>
<p>Although there are numerous advancements in AI-based disease diagnosis and microbial biocontrol strategies, their deployment is still hindered by many field-level issues. The first and foremost challenge is the quality and diversity of the datasets utilized for lightweight deep learning models. Most standard public datasets like PlantVillage have high-resolution images which are more suitable for training, but they are captured in controlled environments with even lighting. These datasets lack field-level variations such as uneven lighting, background noise, occluded leaves, and overlapping symptoms. This difference between standard datasets and real-time scenarios has resulted in the performance gap in our study, where model accuracy was lower than published standards due to dataset imbalance and restricted class variety. Therefore, in future research, priority should be given to the development of regionally varied, field-annotated datasets that record local disease expression, environmental factors, and viral infections carried by insects.</p>
<p>The next challenge is the hardware deployment constraints. Though lightweight models like MobileNet show high efficiency, their implementation on edge devices like the NVIDIA Jetson Nano, Raspberry Pi, and Android smartphones is still constrained by computational power, battery capacity, and thermal management. In high-resolution or real-time applications, inference delays can lower the reliability of diagnosis. Future research should address this issue by combining novel methods like TinyML and hardware-aware designs with model optimization methods including pruning, quantization, knowledge distillation, and neural architecture search (NAS) to produce powerful low-power AI models.</p>
<p>Although AI models are better at identifying diseases, there is still little interaction between these models and microbial biocontrol strategies. To address this gap, we have proposed a framework (<xref ref-type="fig" rid="f6"><bold>Figure&#xa0;6</bold></xref>) which combines AI diagnosis with microbial recommendations. But in order to achieve this synergy, microorganisms like <italic>Trichoderma</italic> spp., <italic>Bacillus subtilis</italic> and <italic>Beauveria bassiana</italic> must be used at the right time, before visible symptoms occur. To provide consistency, reliability and efficiency, microbial consortia must be customized for each region and field validated across different agro-climatic zones. An additional approach is combining these microbial agents with proper delivery systems like drone-mounted dispensers or GPS sprayers.</p>
<p>Lastly, adoption by farmers still depends on trust and interpretability. It is common among farmers and extension agents to be reluctant to accept black-box predictions. Combining explainable AI (XAI) methods like Grad-CAM with mobile apps can increase transparency because they emphasize the leaf region that influenced the model prediction. Accessibility can be further improved by voice-assisted, multilingual and offline-compatible interfaces in rural areas. In addition to technological advances, supportive institutional frameworks are needed to promote open datasets, standardize field evaluation protocols, and build farmer training programs.</p>
<p>In conclusion, the combination of sustainable microbial biocontrol strategies and lightweight AI models holds the key to managing tomato leaf diseases in the future. Moreover, in order to develop reliable, scalable and farmer-specific solutions, addressing dataset imbalance, hardware constraints, interpretability, and integration challenges is necessary, which aligns with the aims of precision agriculture and sustainable crop protection.</p>
</sec>
<sec id="s6" sec-type="conclusions">
<label>6</label>
<title>Conclusion</title>
<p>New approaches for managing tomato diseases have been made possible by the convergence of artificial intelligence and sustainable crop protection. Deep learning approaches, particularly CNNs and transformer-based models have demonstrated great potential in automating leaf disease diagnosis from images, acting as a substitute for conventional labor-intensive methods. Moreover, real-time, on-field disease diagnosis on edge devices is made possible as a result of the advancements in lightweight architectures and other compression techniques. However, successful field deployment is still constrained by issues like model generalizability, dataset diversity and hardware constraints.</p>
<p>Simultaneously, microbial biocontrol agents are becoming more and more popular as environmentally acceptable substitutes for synthetic pesticides. But their efficacy relies heavily on timely and targeted application. This review highlights the unexplored potential of combining microbial biocontrol strategies with AI-based early detection to take focused and preventive actions. Development of field-representative datasets, enhancing model interpretability and creating closed-loop systems that connect diagnosis with smart decision support are all necessary for future advancements in this domain.</p>
<p>Our proposed framework in this study adds to this goal by combining microbial biocontrol techniques with AI-based early disease detection, which goes a step beyond previous studies that addressed these components separately. This study provides a useful path for practical implementation by combining sustainable treatment and diagnosis in a single decision-support system. This connection of intelligence and sustainability holds the key to managing tomato diseases and also opens the door to a new era of precision agriculture.</p>
</sec>
</body>
<back>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>HG: Writing &#x2013; original draft, Writing &#x2013; review &amp; editing. SR: Supervision, Writing &#x2013; review &amp; editing. LB: Supervision, Writing &#x2013; review &amp; editing.</p></sec>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p></sec>
<sec id="s10" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was used in the creation of this manuscript. Generative AI tools (ChatGPT, OpenAI) were used to assist with language refinement, grammar improvement, and restructuring of certain paragraphs. All scientific content, study design, analyses, interpretations, and conclusions were developed entirely by the authors.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p></sec>
<sec id="s11" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p></sec>
<ref-list>
<title>References</title>
<ref id="B1">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Agarwal</surname> <given-names>A.</given-names></name>
<name><surname>Gupta</surname> <given-names>A.</given-names></name>
<name><surname>Sehgal</surname> <given-names>M.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Lightweight CNN architectures for tomato disease detection under field conditions</article-title>. <source>J. Plant Pathol.</source> <volume>105</volume>, <fpage>1201</fpage>&#x2013;<lpage>1214</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s42161-023-01423-8</pub-id>
</mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Agha</surname> <given-names>S. I.</given-names></name>
<name><surname>Ullah</surname> <given-names>M.</given-names></name>
<name><surname>Khan</surname> <given-names>A.</given-names></name>
<name><surname>Jahan</surname> <given-names>N.</given-names></name>
<name><surname>Hamdard</surname> <given-names>M. H.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Biocontrol rhizobacteria enhances growth and yield of wheat (Triticum aestivum) under field conditions against Fusarium oxysporum</article-title>. <source>Bioengineered</source> <volume>14</volume>, <elocation-id>2260923</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/21655979.2023.2260923</pub-id>, PMID: <pub-id pub-id-type="pmid">37791524</pub-id>
</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ahmed</surname> <given-names>S.</given-names></name>
<name><surname>Hasan</surname> <given-names>M. B.</given-names></name>
<name><surname>Ahmed</surname> <given-names>T.</given-names></name>
<name><surname>Sony</surname> <given-names>R. K.</given-names></name>
<name><surname>Kabir</surname> <given-names>M. H.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Less is more: lighter and faster deep neural architecture for tomato leaf disease classification [Preprint]</article-title>. <source>arXiv</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.2109.02394</pub-id>
</mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Alshammari</surname> <given-names>S. A.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>TLDViT: A vision transformer model for tomato leaf disease classification</article-title>. <source>Int. J. Advanced Comput. Sci. Appl.</source> <volume>15</volume>, <fpage>85</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.14569/IJACSA.2024.0151285</pub-id>
</mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bebber</surname> <given-names>D. P.</given-names></name>
<name><surname>Ramotowski</surname> <given-names>M. A. T.</given-names></name>
<name><surname>Gurr</surname> <given-names>S. J.</given-names></name>
</person-group> (<year>2013</year>). 
<article-title>Crop pests and pathogens move polewards in a warming world</article-title>. <source>Nat. Climate Change</source> <volume>3</volume>, <fpage>985</fpage>&#x2013;<lpage>988</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/nclimate1990</pub-id>
</mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ben&#xed;tez</surname> <given-names>T.</given-names></name>
<name><surname>Rinc&#xf3;n</surname> <given-names>A. M.</given-names></name>
<name><surname>Lim&#xf3;n</surname> <given-names>M. C.</given-names></name>
<name><surname>Cod&#xf3;n</surname> <given-names>A. C.</given-names></name>
</person-group> (<year>2004</year>). 
<article-title>Biocontrol mechanisms of Trichoderma strains</article-title>. <source>Int. Microbiol.</source> <volume>7</volume>, <fpage>249</fpage>&#x2013;<lpage>260</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s10123-004-0155-1</pub-id>, PMID: <pub-id pub-id-type="pmid">15666245</pub-id>
</mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bock</surname> <given-names>C. H.</given-names></name>
<name><surname>Poole</surname> <given-names>G. H.</given-names></name>
<name><surname>Parker</surname> <given-names>P. E.</given-names></name>
<name><surname>Gottwald</surname> <given-names>T. R.</given-names></name>
</person-group> (<year>2010</year>). 
<article-title>Plant disease severity estimated visually, by digital photography and image analysis, and by hyperspectral imaging</article-title>. <source>Crit. Rev. Plant Sci.</source> <volume>29</volume>, <fpage>59</fpage>&#x2013;<lpage>107</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/07352681003617285</pub-id>
</mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="web">
<person-group person-group-type="author">
<name><surname>Chaudhry</surname> <given-names>C.</given-names></name>
</person-group> (<year>2021</year>). <source>PlantVillage Tomato Leaf Dataset (Version 1) [Data set]</source> (
<publisher-name>Kaggle</publisher-name>). Available online at: <uri xlink:href="https://www.kaggle.com/datasets/charuchaudhry/plantvillage-tomato-leaf-dataset">https://www.kaggle.com/datasets/charuchaudhry/plantvillage-tomato-leaf-dataset</uri>.
</mixed-citation>
</ref>
<ref id="B9">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Cheng</surname> <given-names>Y.</given-names></name>
<name><surname>Wang</surname> <given-names>D.</given-names></name>
<name><surname>Zhou</surname> <given-names>P.</given-names></name>
<name><surname>Zhang</surname> <given-names>T.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>A survey of model compression and acceleration for deep neural networks</article-title>. <source>arXiv preprint arXiv:1710.09282</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.1710.09282</pub-id>
</mixed-citation>
</ref>
<ref id="B10">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chowdhury</surname> <given-names>S. P.</given-names></name>
<name><surname>Hartmann</surname> <given-names>A.</given-names></name>
<name><surname>Gao</surname> <given-names>X.</given-names></name>
<name><surname>Borriss</surname> <given-names>R.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Biocontrol mechanism by root-associated Bacillus amyloliquefaciens FZB42 &#x2013; a review</article-title>. <source>Front. Microbiol.</source> <volume>6</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fmicb.2015.00780</pub-id>, PMID: <pub-id pub-id-type="pmid">26284057</pub-id>
</mixed-citation>
</ref>
<ref id="B11">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dai</surname> <given-names>Z.</given-names></name>
<name><surname>Liu</surname> <given-names>H.</given-names></name>
<name><surname>Le</surname> <given-names>Q. V.</given-names></name>
<name><surname>Tan</surname> <given-names>M.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>CoAtNet: Marrying convolution and attention for all data sizes</article-title>. <source>arXiv e-prints</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.2106.04803</pub-id>
</mixed-citation>
</ref>
<ref id="B12">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>De Silva</surname> <given-names>M.</given-names></name>
<name><surname>Brown</surname> <given-names>D. L.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Multispectral plant disease detection with vision transformer&#x2013;convolutional neural network hybrid approaches</article-title>. <source>Sensors</source> <volume>23</volume>, <elocation-id>8531</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s23208531</pub-id>, PMID: <pub-id pub-id-type="pmid">37896623</pub-id>
</mixed-citation>
</ref>
<ref id="B13">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Dosovitskiy</surname> <given-names>A.</given-names></name>
<name><surname>Beyer</surname> <given-names>L.</given-names></name>
<name><surname>Kolesnikov</surname> <given-names>A.</given-names></name>
<name><surname>Weissenborn</surname> <given-names>D.</given-names></name>
<name><surname>Zhai</surname> <given-names>X.</given-names></name>
<name><surname>Unterthiner</surname> <given-names>T.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). &#x201c;
<article-title>An image is worth 16&#xd7;16 words: Transformers for image recognition at scale</article-title>,&#x201d; in <conf-name>International Conference on Learning Representations (ICLR)</conf-name>. (<publisher-loc>San Francisco, CA, United States</publisher-loc>: 
<publisher-name>OpenReview</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.2010.11929</pub-id>
</mixed-citation>
</ref>
<ref id="B14">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Durmu&#x15f;</surname> <given-names>H.</given-names></name>
<name><surname>G&#xfc;ne&#x15f;</surname> <given-names>E. O.</given-names></name>
<name><surname>Kirci</surname> <given-names>M.</given-names></name>
</person-group> (<year>2017</year>). &#x201c;
<article-title>Disease detection on the leaves of tomato plants using deep learning</article-title>,&#x201d; in <conf-name>2017 6th International Conference on Agro-Geoinformatics</conf-name>. <fpage>1</fpage>&#x2013;<lpage>5</lpage> (<publisher-loc>New York, NY, United States</publisher-loc>: 
<publisher-name>IEEE</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1109/Agro-Geoinformatics.2017.8047016</pub-id>
</mixed-citation>
</ref>
<ref id="B15">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Fargues</surname> <given-names>J.</given-names></name>
<name><surname>Goettel</surname> <given-names>M.</given-names></name>
<name><surname>Smits</surname> <given-names>N.</given-names></name>
<name><surname>Ouedraogo</surname> <given-names>A.</given-names></name>
</person-group> (<year>2012</year>). 
<article-title>Effect of temperature and humidity on Beauveria bassiana against whiteflies</article-title>. <source>J. Invertebrate Pathol.</source> <volume>110</volume>, <fpage>143</fpage>&#x2013;<lpage>151</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jip.2012.03.009</pub-id>, PMID: <pub-id pub-id-type="pmid">22433999</pub-id>
</mixed-citation>
</ref>
<ref id="B16">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ferentinos</surname> <given-names>K. P.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Deep learning models for plant disease detection and diagnosis</article-title>. <source>Comput. Electron. Agric.</source> <volume>145</volume>, <fpage>311</fpage>&#x2013;<lpage>318</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2018.01.009</pub-id>
</mixed-citation>
</ref>
<ref id="B17">
<mixed-citation publication-type="book">
<person-group person-group-type="author"><collab>Food and Agriculture Organization of the United Nations</collab>
</person-group> (<year>2021</year>). <source>FAOSTAT statistical database</source> (<publisher-loc>Rome, Italy</publisher-loc>: 
<publisher-name>FAO</publisher-name>).
</mixed-citation>
</ref>
<ref id="B18">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Fuentes</surname> <given-names>A.</given-names></name>
<name><surname>Yoon</surname> <given-names>S.</given-names></name>
<name><surname>Park</surname> <given-names>D. S.</given-names></name>
<name><surname>Kim</surname> <given-names>S.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>A robust deep-learning-based detector for real-time tomato plant diseases and pests recognition</article-title>. <source>Comput. Electron. Agric.</source> <volume>142</volume>, <fpage>241</fpage>&#x2013;<lpage>251</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2017.09.001</pub-id>, PMID: <pub-id pub-id-type="pmid">28869539</pub-id>
</mixed-citation>
</ref>
<ref id="B19">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Garrett</surname> <given-names>K. A.</given-names></name>
<name><surname>Dendy</surname> <given-names>S. P.</given-names></name>
<name><surname>Frank</surname> <given-names>E. E.</given-names></name>
<name><surname>Rouse</surname> <given-names>M. N.</given-names></name>
<name><surname>Travers</surname> <given-names>S. E.</given-names></name>
</person-group> (<year>2006</year>). 
<article-title>Climate change effects on plant disease: Genomes to ecosystems</article-title>. <source>Annu. Rev. Phytopathol.</source> <volume>44</volume>, <fpage>489</fpage>&#x2013;<lpage>509</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1146/annurev.phyto.44.070505.143420</pub-id>, PMID: <pub-id pub-id-type="pmid">16722808</pub-id>
</mixed-citation>
</ref>
<ref id="B20">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gonz&#xe1;lez-Briones</surname> <given-names>A.</given-names></name>
<name><surname>Florez</surname> <given-names>S. L.</given-names></name>
<name><surname>Chamoso</surname> <given-names>P.</given-names></name>
<name><surname>Castillo-Ossa</surname> <given-names>H.</given-names></name>
<name><surname>CorChado</surname> <given-names>S.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Enhancing plant disease detection: Incorporating advanced CNN architectures for better accuracy and interpretability</article-title>. <source>Int. J. Comput. Intell. Syst.</source> <volume>18</volume>, <fpage>Article 120</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s44196-025-00835-2</pub-id>
</mixed-citation>
</ref>
<ref id="B21">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Guzm&#xe1;n-Guzm&#xe1;n</surname> <given-names>P.</given-names></name>
<name><surname>Kumar</surname> <given-names>A.</given-names></name>
<name><surname>de los Santos-Villalobos</surname> <given-names>S.</given-names></name>
<name><surname>Parra-Cota</surname> <given-names>F. I.</given-names></name>
<name><surname>Orozco-Mosqueda</surname> <given-names>M.</given-names></name>
<name><surname>d.</surname> <given-names>C.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Trichoderma species: our best fungal allies in the biocontrol of plant diseases&#x2014;A review</article-title>. <source>Plants</source> <volume>12</volume>, <elocation-id>432</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/plants12030432</pub-id>, PMID: <pub-id pub-id-type="pmid">36771517</pub-id>
</mixed-citation>
</ref>
<ref id="B22">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Han</surname> <given-names>S.</given-names></name>
<name><surname>Mao</surname> <given-names>H.</given-names></name>
<name><surname>Dally</surname> <given-names>W. J.</given-names></name>
</person-group> (<year>2016</year>). &#x201c;
<article-title>Deep compression: Compressing deep neural networks with pruning, trained quantization and Huffman coding</article-title>,&#x201d; in <conf-name>International Conference on Learning Representations (ICLR)</conf-name>. (<publisher-loc>San Francisco, CA, United States</publisher-loc>: 
<publisher-name>OpenReview</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.1510.00149</pub-id>
</mixed-citation>
</ref>
<ref id="B23">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hanssen</surname> <given-names>I. M.</given-names></name>
<name><surname>Lapidot</surname> <given-names>M.</given-names></name>
<name><surname>Thomma</surname> <given-names>B. P. H. J.</given-names></name>
</person-group> (<year>2010</year>). 
<article-title>Emerging viral diseases of tomato crops</article-title>. <source>Mol. Plant-Microbe Interact.</source> <volume>23</volume>, <fpage>539</fpage>&#x2013;<lpage>548</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1094/MPMI-23-5-0539</pub-id>, PMID: <pub-id pub-id-type="pmid">20367462</pub-id>
</mixed-citation>
</ref>
<ref id="B24">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Haque</surname> <given-names>M.</given-names></name>
<name><surname>Rahman</surname> <given-names>K.</given-names></name>
<name><surname>Tarek</surname> <given-names>A.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Environmental sensor fusion with lightweight CNNs improves field-level disease prediction in tomato crops</article-title>. <source>Precis. Agric.</source> <volume>23</volume>, <fpage>1412</fpage>&#x2013;<lpage>1428</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11119-022-09899-8</pub-id>
</mixed-citation>
</ref>
<ref id="B25">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Harman</surname> <given-names>G. E.</given-names></name>
<name><surname>Howell</surname> <given-names>C. R.</given-names></name>
<name><surname>Viterbo</surname> <given-names>A.</given-names></name>
<name><surname>Chet</surname> <given-names>I.</given-names></name>
<name><surname>Lorito</surname> <given-names>M.</given-names></name>
</person-group> (<year>2004</year>). 
<article-title>Trichoderma species&#x2014;Opportunistic, avirulent plant symbionts</article-title>. <source>Nat. Rev. Microbiol.</source> <volume>2</volume>, <fpage>43</fpage>&#x2013;<lpage>56</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/nrmicro797</pub-id>, PMID: <pub-id pub-id-type="pmid">15035008</pub-id>
</mixed-citation>
</ref>
<ref id="B26">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hayward</surname> <given-names>A. C.</given-names></name>
</person-group> (<year>1991</year>). 
<article-title>Biology and epidemiology of bacterial wilt caused by Pseudomonas solanacearum</article-title>. <source>Annu. Rev. Phytopathol.</source> <volume>29</volume>, <fpage>65</fpage>&#x2013;<lpage>87</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1146/annurev.py.29.090191.000433</pub-id>, PMID: <pub-id pub-id-type="pmid">18479193</pub-id>
</mixed-citation>
</ref>
<ref id="B27">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Hinton</surname> <given-names>G.</given-names></name>
<name><surname>Vinyals</surname> <given-names>O.</given-names></name>
<name><surname>Dean</surname> <given-names>J.</given-names></name>
</person-group> (<year>2015</year>). &#x201c;
<article-title>Distilling the knowledge in a neural network</article-title>,&#x201d; in <source>NIPS Deep Learning and Representation Learning Workshop</source>. (<publisher-loc>Ithaca, NY, United States</publisher-loc>: 
<publisher-name>Cornell University (arXiv)</publisher-name>) Available online at: <uri xlink:href="https://arxiv.org/abs/1503.02531">https://arxiv.org/abs/1503.02531</uri> (Accessed <date-in-citation content-type="access-date">January 11, 2026</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B28">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Howard</surname> <given-names>A. G.</given-names></name>
<name><surname>Zhu</surname> <given-names>M.</given-names></name>
<name><surname>Chen</surname> <given-names>B.</given-names></name>
<name><surname>Kalenichenko</surname> <given-names>D.</given-names></name>
<name><surname>Wang</surname> <given-names>W.</given-names></name>
<name><surname>Weyand</surname> <given-names>T.</given-names></name>
<etal/>
</person-group>. (<year>2017</year>). 
<article-title>MobileNets: Efficient convolutional neural networks for mobile vision applications</article-title>. <source>arXiv preprint arXiv:1704.04861</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.1704.04861</pub-id>
</mixed-citation>
</ref>
<ref id="B29">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hughes</surname> <given-names>D. P.</given-names></name>
<name><surname>Salath&#xe9;</surname> <given-names>M.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>An open access repository of images on plant health to enable the development of mobile disease diagnostics</article-title>. <source>arXiv preprint arXiv:1511.08060</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.1511.08060</pub-id>
</mixed-citation>
</ref>
<ref id="B30">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Jacob</surname> <given-names>B.</given-names></name>
<name><surname>Kligys</surname> <given-names>S.</given-names></name>
<name><surname>Chen</surname> <given-names>B.</given-names></name>
<name><surname>Zhu</surname> <given-names>M.</given-names></name>
<name><surname>Tang</surname> <given-names>M.</given-names></name>
<name><surname>Howard</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2018</year>). &#x201c;
<article-title>Quantization and training of neural networks for efficient integer-arithmetic-only inference</article-title>,&#x201d; in <conf-name>Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition (CVPR)</conf-name>. (<publisher-loc>New York, NY, United States</publisher-loc>: 
<publisher-name>IEEE</publisher-name>). <fpage>2704</fpage>&#x2013;<lpage>2713</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/CVPR.2018.00286</pub-id>
</mixed-citation>
</ref>
<ref id="B31">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jia</surname> <given-names>Y.</given-names></name>
<name><surname>Li</surname> <given-names>J.</given-names></name>
<name><surname>Niu</surname> <given-names>H.</given-names></name>
<name><surname>Ruan</surname> <given-names>J.</given-names></name>
<name><surname>Chen</surname> <given-names>J.</given-names></name>
<name><surname>Li</surname> <given-names>Y.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Synergistic antagonism mechanism of <italic>Bacillus</italic>-<italic>Pseudomonas</italic> consortium against <italic>Alternaria solani</italic></article-title>. <source>Eur. J. Plant Pathol.</source> <volume>167</volume>, <fpage>715</fpage>&#x2013;<lpage>726</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s10658-023-02747-3</pub-id>
</mixed-citation>
</ref>
<ref id="B32">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jiao</surname> <given-names>X.</given-names></name>
<name><surname>Takishita</surname> <given-names>Y.</given-names></name>
<name><surname>Zhou</surname> <given-names>G.</given-names></name>
<name><surname>Smith</surname> <given-names>D. L.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Plant associated rhizobacteria for biocontrol and plant growth enhancement</article-title>. <source>Front. Plant Sci.</source> <volume>12</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2021.634796</pub-id>, PMID: <pub-id pub-id-type="pmid">33815442</pub-id>
</mixed-citation>
</ref>
<ref id="B33">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Jones</surname> <given-names>J. B.</given-names></name>
<name><surname>Zitter</surname> <given-names>T. A.</given-names></name>
<name><surname>Momol</surname> <given-names>M. T.</given-names></name>
<name><surname>Miller</surname> <given-names>S. A.</given-names></name>
</person-group> (<year>2014</year>). <source>Compendium of tomato diseases and pests (2nd ed.)</source> (<publisher-loc>St. Paul, MN</publisher-loc>: 
<publisher-name>American Phytopathological Society Press</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1094/9780890544341</pub-id>
</mixed-citation>
</ref>
<ref id="B34">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kamilaris</surname> <given-names>A.</given-names></name>
<name><surname>Prenafeta-Bold&#xfa;</surname> <given-names>F. X.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Deep learning in agriculture: A survey</article-title>. <source>Comput. Electron. Agric.</source> <volume>147</volume>, <fpage>70</fpage>&#x2013;<lpage>90</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2018.02.016</pub-id>
</mixed-citation>
</ref>
<ref id="B35">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Khan</surname> <given-names>A.</given-names></name>
<name><surname>Williams</surname> <given-names>K.</given-names></name>
<name><surname>Roberts</surname> <given-names>J.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Biological control of root-knot nematodes using Purpureocillium lilacinum</article-title>. <source>Crop Prot.</source> <volume>118</volume>, <fpage>75</fpage>&#x2013;<lpage>82</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cropro.2018.12.004</pub-id>
</mixed-citation>
</ref>
<ref id="B36">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Koirala</surname> <given-names>A.</given-names></name>
<name><surname>Walsh</surname> <given-names>K.</given-names></name>
<name><surname>Wang</surname> <given-names>Z.</given-names></name>
<name><surname>McCarthy</surname> <given-names>C.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Lightweight multimodal fusion networks for real-time plant disease detection on edge devices</article-title>. <source>IEEE Access</source> <volume>11</volume>, <fpage>45512</fpage>&#x2013;<lpage>45525</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2023.3275038</pub-id>
</mixed-citation>
</ref>
<ref id="B37">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lacey</surname> <given-names>L. A.</given-names></name>
<name><surname>Grzywacz</surname> <given-names>D.</given-names></name>
<name><surname>Shapiro-Ilan</surname> <given-names>D. I.</given-names></name>
<name><surname>Frutos</surname> <given-names>R.</given-names></name>
<name><surname>Brownbridge</surname> <given-names>M.</given-names></name>
<name><surname>Goettel</surname> <given-names>M. S.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Insect pathogens as biological control agents: Back to the future</article-title>. <source>J. Invertebrate Pathol.</source> <volume>132</volume>, <fpage>1</fpage>&#x2013;<lpage>41</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jip.2015.07.009</pub-id>, PMID: <pub-id pub-id-type="pmid">26225455</pub-id>
</mixed-citation>
</ref>
<ref id="B38">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lapidot</surname> <given-names>M.</given-names></name>
<name><surname>Friedmann</surname> <given-names>M.</given-names></name>
</person-group> (<year>2002</year>). 
<article-title>Breeding for resistance to whitefly-transmitted geminiviruses</article-title>. <source>Ann. Appl. Biol.</source> <volume>140</volume>, <fpage>109</fpage>&#x2013;<lpage>127</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/j.1744-7348.2002.tb00163.x</pub-id>
</mixed-citation>
</ref>
<ref id="B39">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>G.</given-names></name>
<name><surname>Ma</surname> <given-names>X.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
<name><surname>Liu</surname> <given-names>L.</given-names></name>
<name><surname>Xue</surname> <given-names>J.</given-names></name>
<name><surname>Feng</surname> <given-names>X.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Fusion-catalyzed pruning for optimizing deep learning on intelligent edge devices</article-title>. <source>arXiv preprint</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.2010.16165</pub-id>
</mixed-citation>
</ref>
<ref id="B40">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>G.</given-names></name>
<name><surname>Wang</surname> <given-names>Y.</given-names></name>
<name><surname>Zhao</surname> <given-names>Q.</given-names></name>
<name><surname>Yuan</surname> <given-names>P.</given-names></name>
<name><surname>Chang</surname> <given-names>B.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>PMVT: A lightweight vision transformer for plant disease identification on mobile devices</article-title>. <source>Front. Plant Sci.</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2023.1256773</pub-id>, PMID: <pub-id pub-id-type="pmid">37822342</pub-id>
</mixed-citation>
</ref>
<ref id="B41">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>Z.</given-names></name>
<name><surname>Paolieri</surname> <given-names>M.</given-names></name>
<name><surname>Golubchik</surname> <given-names>L.</given-names></name>
</person-group> (<year>2023</year>). &#x201c;
<article-title>Predicting inference latency of neural architectures on mobile devices</article-title>,&#x201d; in <conf-name>Proceedings of the 2023 ACM/SPEC International Conference on Performance Engineering</conf-name>. <fpage>99</fpage>&#x2013;<lpage>112</lpage> (<publisher-loc>New York, NY, United States</publisher-loc>: 
<publisher-name>ACM</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1145/3578244.3583735</pub-id>
</mixed-citation>
</ref>
<ref id="B42">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lundberg</surname> <given-names>S. M.</given-names></name>
<name><surname>Lee</surname> <given-names>S.-I.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>A unified approach to interpreting model predictions</article-title>. <source>Adv. Neural Inf. Process. Syst. (NeurIPS)</source> <volume>30</volume>, <fpage>4765</fpage>&#x2013;<lpage>4774</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.1705.07874</pub-id>
</mixed-citation>
</ref>
<ref id="B43">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ma</surname> <given-names>C.-S.</given-names></name>
<name><surname>Zhang</surname> <given-names>W.</given-names></name>
<name><surname>Peng</surname> <given-names>Y.</given-names></name>
<name><surname>Zhao</surname> <given-names>F.</given-names></name>
<name><surname>Chang</surname> <given-names>X.-Q.</given-names></name>
<name><surname>Xing</surname> <given-names>K.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). 
<article-title>Climate warming promotes pesticide resistance through expanding overwintering range of a global pest</article-title>. <source>Nat. Commun.</source> <volume>12</volume>, <fpage>6215</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41467-021-25505-7</pub-id>, PMID: <pub-id pub-id-type="pmid">34504063</pub-id>
</mixed-citation>
</ref>
<ref id="B44">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Mazzia</surname> <given-names>V.</given-names></name>
<name><surname>Khaliq</surname> <given-names>A.</given-names></name>
<name><surname>Salvetti</surname> <given-names>F.</given-names></name>
<name><surname>Chiaberge</surname> <given-names>M.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Real-time apple detection system using embedded systems with hardware accelerators: An edge AI application</article-title>. <source>IEEE Access</source> <volume>8</volume>, <fpage>9102</fpage>&#x2013;<lpage>9114</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2020.2964608</pub-id>
</mixed-citation>
</ref>
<ref id="B45">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Mohanty</surname> <given-names>S. P.</given-names></name>
<name><surname>Hughes</surname> <given-names>D. P.</given-names></name>
<name><surname>Salath&#xe9;</surname> <given-names>M.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Using deep learning for image-based plant disease detection</article-title>. <source>Front. Plant Sci.</source> <volume>7</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2016.01419</pub-id>, PMID: <pub-id pub-id-type="pmid">27713752</pub-id>
</mixed-citation>
</ref>
<ref id="B46">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Mustofa</surname> <given-names>S.</given-names></name>
<name><surname>Munna</surname> <given-names>M. M. H.</given-names></name>
<name><surname>Emon</surname> <given-names>Y. R.</given-names></name>
<name><surname>Rabbany</surname> <given-names>G.</given-names></name>
<name><surname>Ahad</surname> <given-names>M. T.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>A comprehensive review on Plant Leaf Disease detection using Deep learning</article-title>. <source>arXiv preprint arXiv:2308.14087</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.2308.14087</pub-id>
</mixed-citation>
</ref>
<ref id="B47">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Nishankar</surname> <given-names>M.</given-names></name>
<name><surname>Pavindran</surname> <given-names>V.</given-names></name>
<name><surname>Mithuran</surname> <given-names>T.</given-names></name>
<name><surname>Nimishan</surname> <given-names>S.</given-names></name>
<name><surname>Thuseethan</surname> <given-names>S.</given-names></name>
<name><surname>Sebastian</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>ViT-RoT: vision transformer-based robust framework for tomato leaf disease recognition</article-title>. <source>AgriEngineering</source> <volume>7</volume>, <elocation-id>185</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriengineering7060185</pub-id>
</mixed-citation>
</ref>
<ref id="B48">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Nowicki</surname> <given-names>M.</given-names></name>
<name><surname>Foolad</surname> <given-names>M. R.</given-names></name>
<name><surname>Nowakowska</surname> <given-names>M.</given-names></name>
<name><surname>Kozik</surname> <given-names>E. U.</given-names></name>
</person-group> (<year>2012</year>). 
<article-title>Potato and tomato late blight caused by Phytophthora infestans: An overview of pathology and resistance breeding</article-title>. <source>Plant Dis.</source> <volume>96</volume>, <fpage>4</fpage>&#x2013;<lpage>17</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1094/PDIS-05-11-0458</pub-id>, PMID: <pub-id pub-id-type="pmid">30731850</pub-id>
</mixed-citation>
</ref>
<ref id="B49">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Park</surname> <given-names>J.</given-names></name>
<name><surname>Kim</surname> <given-names>S.</given-names></name>
<name><surname>Yoon</surname> <given-names>D.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Edge-computing framework for plant stress detection using ESP32-integrated NIR sensors</article-title>. <source>Sensors</source> <volume>23</volume>, <elocation-id>2041</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s23042041</pub-id>, PMID: <pub-id pub-id-type="pmid">36850639</pub-id>
</mixed-citation>
</ref>
<ref id="B50">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Peng</surname> <given-names>Z.</given-names></name>
<name><surname>Huang</surname> <given-names>W.</given-names></name>
<name><surname>Gu</surname> <given-names>S.</given-names></name>
<name><surname>Xie</surname> <given-names>L.</given-names></name>
<name><surname>Wang</surname> <given-names>Y.</given-names></name>
<name><surname>Jiao</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>). 
<article-title>Conformer: Local features coupling global representations for visual recognition</article-title>. <source>arXiv e-prints</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.2105.03889</pub-id>
</mixed-citation>
</ref>
<ref id="B51">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Pimentel</surname> <given-names>D.</given-names></name>
<name><surname>Burgess</surname> <given-names>M.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>Environmental and economic costs of the application of pesticides primarily in the United States</article-title>. In: 
<person-group person-group-type="editor">
<name><surname>Pimentel</surname> <given-names>D.</given-names></name>
<name><surname>Peshin</surname> <given-names>R.</given-names></name>
</person-group> (eds). <source>Integrated Pest Manage</source>. (<publisher-loc>Dordrecht</publisher-loc>: 
<publisher-name>Springer</publisher-name>), <fpage>47</fpage>&#x2013;<lpage>71</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-94-007-7796-5_2</pub-id>
</mixed-citation>
</ref>
<ref id="B52">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Pretty</surname> <given-names>J.</given-names></name>
<name><surname>Bharucha</surname> <given-names>Z. P.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Integrated pest management for sustainable intensification of agriculture in Asia and Africa</article-title>. <source>Insects</source> <volume>6</volume>, <fpage>152</fpage>&#x2013;<lpage>182</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/insects6010152</pub-id>, PMID: <pub-id pub-id-type="pmid">26463073</pub-id>
</mixed-citation>
</ref>
<ref id="B53">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rejeb</surname> <given-names>A.</given-names></name>
<name><surname>Abdollahi</surname> <given-names>A.</given-names></name>
<name><surname>Rejeb</surname> <given-names>K.</given-names></name>
<name><surname>Treiblmaier</surname> <given-names>H.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Drones in agriculture: A review and bibliometric analysis</article-title>. <source>Comput. Electron. Agric.</source> <volume>198</volume>, <elocation-id>107017</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2022.107017</pub-id>
</mixed-citation>
</ref>
<ref id="B54">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ruiz-G&#xf3;mez</surname> <given-names>J.</given-names></name>
<name><surname>Cebri&#xe1;n</surname> <given-names>D.</given-names></name>
<name><surname>S&#xe1;nchez</surname> <given-names>A.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Chlorophyll fluorescence and deep learning fusion for early viral disease identification in tomato plants</article-title>. <source>Plant Phenomics</source> <volume>3</volume>, <elocation-id>100056</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.34133/2021/100056</pub-id>
</mixed-citation>
</ref>
<ref id="B55">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Sandler</surname> <given-names>M.</given-names></name>
<name><surname>Howard</surname> <given-names>A.</given-names></name>
<name><surname>Zhu</surname> <given-names>M.</given-names></name>
<name><surname>Zhmoginov</surname> <given-names>A.</given-names></name>
<name><surname>Chen</surname> <given-names>L.-C.</given-names></name>
</person-group> (<year>2018</year>). &#x201c;
<article-title>MobileNetV2: Inverted residuals and linear bottlenecks</article-title>,&#x201d; in <conf-name>Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition (CVPR)</conf-name>. (<publisher-loc>New York, NY, United States</publisher-loc>: 
<publisher-name>IEEE</publisher-name>) <fpage>4510</fpage>&#x2013;<lpage>4520</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/CVPR.2018.00474</pub-id>
</mixed-citation>
</ref>
<ref id="B56">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Selvaraju</surname> <given-names>R. R.</given-names></name>
<name><surname>Cogswell</surname> <given-names>M.</given-names></name>
<name><surname>Das</surname> <given-names>A.</given-names></name>
<name><surname>Vedantam</surname> <given-names>R.</given-names></name>
<name><surname>Parikh</surname> <given-names>D.</given-names></name>
<name><surname>Batra</surname> <given-names>D.</given-names></name>
</person-group> (<year>2017</year>). &#x201c;
<article-title>Grad-CAM: Visual explanations from deep networks via gradient-based localization</article-title>,&#x201d; in <conf-name>Proceedings of the IEEE International Conference on Computer Vision (ICCV)</conf-name>. <fpage>618</fpage>&#x2013;<lpage>626</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ICCV.2017.74</pub-id>
</mixed-citation>
</ref>
<ref id="B57">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Tan</surname> <given-names>M.</given-names></name>
<name><surname>Le</surname> <given-names>Q. V.</given-names></name>
</person-group> (<year>2019</year>). &#x201c;
<article-title>EfficientNet: Rethinking model scaling for convolutional neural networks</article-title>,&#x201d; in <conf-name>Proceedings of the 36th International Conference on Machine Learning</conf-name>, Vol. <volume>97</volume>. <fpage>6105</fpage>&#x2013;<lpage>6114</lpage> (<publisher-loc>Brookline, MA, United States</publisher-loc>: 
<publisher-name>PMLR</publisher-name>).
</mixed-citation>
</ref>
<ref id="B58">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Thuseethan</surname> <given-names>S.</given-names></name>
<name><surname>Vigneshwaran</surname> <given-names>P.</given-names></name>
<name><surname>Charles</surname> <given-names>J.</given-names></name>
<name><surname>Wimalasooriya</surname> <given-names>C.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Siamese network-based lightweight framework for tomato leaf disease recognition</article-title>. <source>Computers</source> <volume>13</volume> (12), <elocation-id>323</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/computers13120323</pub-id>
</mixed-citation>
</ref>
<ref id="B59">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Timilsina</surname> <given-names>S.</given-names></name>
<name><surname>Potnis</surname> <given-names>N.</given-names></name>
<name><surname>Newberry</surname> <given-names>E. A.</given-names></name>
<name><surname>Liyanapathiranage</surname> <given-names>P.</given-names></name>
<name><surname>Iruegas-Bocardo</surname> <given-names>F.</given-names></name>
<name><surname>White</surname> <given-names>F. F.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>Xanthomonas diversity, virulence and plant&#x2013;pathogen interactions</article-title>. <source>Nat. Rev. Microbiol.</source> <volume>18</volume>, <fpage>415</fpage>&#x2013;<lpage>427</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41579-020-0361-8</pub-id>, PMID: <pub-id pub-id-type="pmid">32346148</pub-id>
</mixed-citation>
</ref>
<ref id="B60">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tobiasz</surname> <given-names>R.</given-names></name>
<name><surname>Wilczy&#x144;ski</surname> <given-names>G.</given-names></name>
<name><surname>Graszka</surname> <given-names>P.</given-names></name>
<name><surname>Czechowski</surname> <given-names>N.</given-names></name>
<name><surname>&#x141;uczak</surname> <given-names>S.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Edge devices inference performance comparison</article-title>. <source>arXiv preprint arXiv:2306.12093</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.2306.12093</pub-id>
</mixed-citation>
</ref>
<ref id="B61">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Too</surname> <given-names>E. C.</given-names></name>
<name><surname>Yujian</surname> <given-names>L.</given-names></name>
<name><surname>Njuki</surname> <given-names>S.</given-names></name>
<name><surname>Yingchun</surname> <given-names>L.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>A comparative study of fine-tuning deep learning models for plant disease identification</article-title>. <source>Comput. Electron. Agric.</source> <volume>161</volume>, <fpage>272</fpage>&#x2013;<lpage>279</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2018.03.032</pub-id>
</mixed-citation>
</ref>
<ref id="B62">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>van der Sloot</surname> <given-names>M.</given-names></name>
<name><surname>Maerowitz-Mcmahan</surname> <given-names>S.</given-names></name>
<name><surname>Postma</surname> <given-names>J.</given-names></name>
<name><surname>Limpens</surname> <given-names>J.</given-names></name>
<name><surname>De Deyn</surname> <given-names>G. B.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Soil-borne disease suppressiveness after short and long term application of fermented, composted or fresh organic amendment treatments in arable soils</article-title>. <source>Appl. Soil Ecol.</source> <volume>195</volume>, <elocation-id>105268</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.apsoil.2023.105268</pub-id>
</mixed-citation>
</ref>
<ref id="B63">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wraight</surname> <given-names>S. P.</given-names></name>
<name><surname>Mason</surname> <given-names>J.</given-names></name>
<name><surname>Sanderson</surname> <given-names>J.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Field performance of Beauveria bassiana for whitefly suppression</article-title>. <source>Biol. Control</source> <volume>149</volume>, <elocation-id>104312</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biocontrol.2020.104312</pub-id>
</mixed-citation>
</ref>
<ref id="B64">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yao</surname> <given-names>X.</given-names></name>
<name><surname>Guo</surname> <given-names>H.</given-names></name>
<name><surname>Zhang</surname> <given-names>K.</given-names></name>
<name><surname>Zhao</surname> <given-names>M.</given-names></name>
<name><surname>Ruan</surname> <given-names>J.</given-names></name>
<name><surname>Chen</surname> <given-names>J.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title><italic>Trichoderma</italic> and its role in biological control of plant fungal and nematode disease</article-title>. <source>Front. Microbiol.</source> <volume>14</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fmicb.2023.1160551</pub-id>, PMID: <pub-id pub-id-type="pmid">37206337</pub-id>
</mixed-citation>
</ref>
<ref id="B65">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>L.</given-names></name>
<name><surname>Chen</surname> <given-names>H.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Multi-spectral sensing for early detection of tomato diseases using micro-NIR and RGB fusion on embedded platforms</article-title>. <source>Comput. Electron. Agric.</source> <volume>198</volume>, <elocation-id>107012</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2022.107012</pub-id>
</mixed-citation>
</ref>
<ref id="B66">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Zuhair</surname> <given-names>M. S. A.</given-names></name>
<name><surname>Widiyanto</surname> <given-names>A.</given-names></name>
<name><surname>Nugroho</surname> <given-names>S.</given-names></name>
</person-group> (<year>2023</year>). &#x201c;
<article-title>Comparison of TensorFlow and TensorFlow Lite for object detection on Raspberry Pi 4</article-title>,&#x201d; in <conf-name>AIP Conference Proceedings</conf-name>, (<publisher-loc>Melville, NY, United States</publisher-loc>: 
<publisher-name>AIP Publishing</publisher-name>). Vol. <volume>2706</volume>. <elocation-id>012024</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1063/5.0120245</pub-id>
</mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn id="n1" fn-type="custom" custom-type="edited-by">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1488587">Yalin Wu</ext-link>, Peking University, China</p></fn>
<fn id="n2" fn-type="custom" custom-type="reviewed-by">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2691701">Anjan Debnath</ext-link>, Khulna University of Engineering &amp; Technology, Bangladesh</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3272687">Guo Xu</ext-link>, Shanghai Dianji University, China</p></fn>
</fn-group>
</back>
</article>