<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="methods-article" dtd-version="1.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Ecol. Evol.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Ecology and Evolution</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Ecol. Evol.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">2296-701X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fevo.2026.1750931</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Methods</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>BugNet: a rapid and scalable pipeline for automated insect monitoring using hierarchical data</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name><surname>Grele</surname><given-names>Ari</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>*</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3287074/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project-administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Richards</surname><given-names>Lora A.</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/522745/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>Department of Entomology, University of Wisconsin-Madison</institution>, <city>Madison</city>, <state>WI</state>,&#xa0;<country country="us">United States</country></aff>
<aff id="aff2"><label>2</label><institution>Department of Biology, University of Nevada Reno</institution>, <city>Reno</city>, <state>NV</state>,&#xa0;<country country="us">United States</country></aff>
<author-notes>
<corresp id="c001"><label>*</label>Correspondence: Ari Grele, <email xlink:href="mailto:arijg21@gmail.com">arijg21@gmail.com</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-25">
<day>25</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2026</year>
</pub-date>
<volume>14</volume>
<elocation-id>1750931</elocation-id>
<history>
<date date-type="received">
<day>20</day>
<month>11</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>30</day>
<month>01</month>
<year>2026</year>
</date>
<date date-type="rev-recd">
<day>22</day>
<month>01</month>
<year>2026</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2026 Grele and Richards.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Grele and Richards</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-25">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>Despite the importance of monitoring insect diversity to ecological and conservation questions, we lack sufficient technologies to monitor insects at scale. While research into automated systems for monitoring biodiversity through camera traps has led to the development of a number of machine learning approaches for insect monitoring, these tools suffer from a lack of training data and face challenges in classifying insects in highly diverse systems where the majority of species are unknown to science. To address these challenges, we developed BugNet, an automated pipeline for aggregating insect image data from online databases and training hierarchical classification models, and tested a large-scale insect detection model on GBIF and field images. We show that this system can be used to rapidly create and validate classification models with high accuracy on internet and field images. Furthermore, we show that incorporating hierarchical data into classification models improves the ability of models to handle unknown taxa. These systems are an important step towards a generalized and scalable insect detection platform. While not capable of monitoring every dimension of insect diversity, BugNet can be used to accurately classify insects from camera trap images, and can be scaled to meet the data needs of larger ecological and conservation questions.</p>
</abstract>
<kwd-group>
<kwd>biodiversity monitoring</kwd>
<kwd>computer vision</kwd>
<kwd>data pipeline</kwd>
<kwd>insect diversity</kwd>
<kwd>tropical forests</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. Funding was supplied through NSF grants DGE-2244337 and EN-2133818.</funding-statement>
</funding-group>
<counts>
<fig-count count="4"/>
<table-count count="3"/>
<equation-count count="0"/>
<ref-count count="84"/>
<page-count count="12"/>
<word-count count="7567"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Conservation and Restoration Ecology</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<title>Introduction</title>
<p>Insects are major contributors to global biodiversity (<xref ref-type="bibr" rid="B63">Stork, 2018</xref>), drive fundamental ecosystem processes (<xref ref-type="bibr" rid="B24">Fr&#xfc;nd et&#xa0;al., 2010</xref>; <xref ref-type="bibr" rid="B83">Yang and Gratton, 2014</xref>), and have been a central component of human culture (<xref ref-type="bibr" rid="B16">Duffus et&#xa0;al., 2021</xref>) and ecology (<xref ref-type="bibr" rid="B23">Flint and van den Bosch, 1981</xref>; <xref ref-type="bibr" rid="B21">Evans and Weinstein, 2021</xref>) for millennia. Our understanding of insects and their&#xa0;global impact relies on our ability to observe and monitor them over meaningful timescales and across ecological and human contexts, from tropical pollinator diversity (<xref ref-type="bibr" rid="B74">Vizentin-Bugoni et&#xa0;al., 2018</xref>) to agricultural pest abundance (<xref ref-type="bibr" rid="B38">Keasar et&#xa0;al., 2023</xref>). As&#xa0;insect&#xa0;populations decline and species loss continues to accelerate (<xref ref-type="bibr" rid="B75">Wagner et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B20">Edwards et&#xa0;al., 2025</xref>) under intensifying anthropogenic stressors, our ability to monitor insect communities is becoming increasingly important. Insect abundance and community composition are highly dynamic, fluctuating over timescales from minutes (<xref ref-type="bibr" rid="B43">Lawson and Rands, 2019</xref>; <xref ref-type="bibr" rid="B81">Wong and Didham, 2024</xref>; <xref ref-type="bibr" rid="B80">Wong, 2025</xref>) to decades (<xref ref-type="bibr" rid="B78">Williams and Simon, 1995</xref>; <xref ref-type="bibr" rid="B76">Weisser and Siemann, 2008</xref>; <xref ref-type="bibr" rid="B5">Barbosa et&#xa0;al., 2012</xref>). Yet much of this variation and its response to natural and human-driven processes goes unnoticed due to limitations in our existing monitoring methods. Traditional insect surveying techniques are labor-intensive, requiring significant investment either in direct observations in the field (<xref ref-type="bibr" rid="B49">Opp and Prokopy, 1986</xref>), or in post-processing of insect specimens (<xref ref-type="bibr" rid="B3">Aman et&#xa0;al., 2024</xref>) or recorded data (<xref ref-type="bibr" rid="B84">Zaller et&#xa0;al., 2015</xref>; <xref ref-type="bibr" rid="B28">Gilpin et&#xa0;al., 2017</xref>). These studies can be difficult to scale when ecological questions or conservation needs require larger datasets, often forcing researchers to choose between collecting in-depth data over a narrow scope, or shallow data over a wide scope.</p>
<p>For example, much of our understanding of pollinator ecology in the field is based on short-duration surveys, where visitation at individual flowers is observed for less than 30 minutes per plant, often with multiple days or weeks between observation periods (<xref ref-type="bibr" rid="B41">Kleijn et&#xa0;al., 2015</xref>; <xref ref-type="bibr" rid="B22">Fijen and Kleijn, 2017</xref>). In some cases, broad conclusions about insect behavior are generalized from observations lasting only a few minutes (<xref ref-type="bibr" rid="B65">Tamburini et&#xa0;al., 2016</xref>). While visual insect surveys outperform passive capture methods such as pan-trapping (<xref ref-type="bibr" rid="B64">T&#x2019;ai et&#xa0;al., 2007</xref>), short observation durations cannot fully describe insect visitation patterns (<xref ref-type="bibr" rid="B22">Fijen and Kleijn, 2017</xref>), or hour-to-hour and day-to-day variation in abundance (<xref ref-type="bibr" rid="B40">Kendall and Nicholson, 2025</xref>) and community composition (<xref ref-type="bibr" rid="B73">Venjakob et&#xa0;al., 2016</xref>). Similarly, much of our understanding of tropical insect diversity relies on traditional trapping methods which aggregate insects over days to weeks (for example, <xref ref-type="bibr" rid="B34">Janzen and Hallwachs, 2019</xref>; <xref ref-type="bibr" rid="B61">Seymour et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B3">Aman et&#xa0;al., 2024</xref>), preventing us from understanding short-term variation in insect abundance or temporal species turnover (<xref ref-type="bibr" rid="B32">Houadria et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B62">Souza et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B81">Wong and Didham, 2024</xref>).</p>
<p>In the last decade, an increasing need for rapid and scalable biodiversity monitoring techniques has driven the development of automated monitoring systems that use machine learning to supplement (<xref ref-type="bibr" rid="B39">Kelling et&#xa0;al., 2012</xref>; <xref ref-type="bibr" rid="B8">Boulent et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B10">Campbell et&#xa0;al., 2023</xref>) or replace traditional methodologies (<xref ref-type="bibr" rid="B18">Dyer et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B57">Roy et&#xa0;al., 2024</xref>). While these techniques have shown promise in vertebrate systems, such as large mammal identification from camera trap images (<xref ref-type="bibr" rid="B60">Schneider et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B72">V&#xe9;lez et&#xa0;al., 2023</xref>) or bird identification (<xref ref-type="bibr" rid="B37">Kahl et&#xa0;al., 2021</xref>) from audio recordings, machine learning methods have been difficult to apply to insects due to their overwhelming species diversity. High insect diversity impacts automated insect monitoring in multiple ways. Heterogeneity in species distributions leads to high levels of species turnover that make it difficult to generalize models trained on samples from specific locations to other locations, even over short spatial ranges (<xref ref-type="bibr" rid="B61">Seymour et&#xa0;al., 2024</xref>). This heterogeneity additionally means that any general system trained on a large pool of species is likely to suffer from misidentifications within individual sites where only a subset of this pool is present. Each additional species a model is trained to classify creates opportunities for misidentifications, biasing models to treat local species assemblages as if they were as diverse as the global species pool (<xref ref-type="bibr" rid="B25">Fukunaga and Flick, 1984</xref>). General automated systems are additionally constrained by the large proportion of undescribed insect species, as well as species that have scientific descriptions but lack the image documentation necessary for training models (<xref ref-type="bibr" rid="B59">Scheffers et&#xa0;al., 2012</xref>; <xref ref-type="bibr" rid="B63">Stork, 2018</xref>). Although taxonomic gaps affect traditional methods of monitoring biodiversity, they become a challenge for models that require an initial list of potential taxa to train. While traditional taxonomic methods can&#xa0;create new lists of known and unknown species for any given&#xa0;location, the most commonly used classification models are&#xa0;only&#xa0;able to assign identifications based on this initial list. These issues are compounded by the presence of cryptic species (<xref ref-type="bibr" rid="B59">Scheffers et&#xa0;al., 2012</xref>), sexual dimorphism (<xref ref-type="bibr" rid="B2">Allen et&#xa0;al., 2011</xref>), morphological changes across life stages (<xref ref-type="bibr" rid="B77">Wiegmann et&#xa0;al., 2009</xref>), long-tailed species distributions (<xref ref-type="bibr" rid="B33">Jain et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B56">Reyes-Gonz&#xe1;lez et&#xa0;al., 2024</xref>), and a lack of accessible data to train models (<xref ref-type="bibr" rid="B33">Jain&#xa0;et&#xa0;al., 2024</xref>).</p>
<p>Although recent work (see, for example, <xref ref-type="bibr" rid="B67">Teixeira et&#xa0;al., 2023</xref>) has made inroads into solving these problems through the development of automated image (<xref ref-type="bibr" rid="B27">Geissmann et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B33">Jain et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B6">Beuchert and Gifford, 2024</xref>) and audio data collection platforms, the release of training datasets of varying sizes and granularity (<xref ref-type="bibr" rid="B70">Van Horn et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B82">Wu et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B67">Teixeira et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B33">Jain et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B68">Truong et&#xa0;al., 2025</xref>), and the deployment of task-specific computer vision models (<xref ref-type="bibr" rid="B67">Teixeira et&#xa0;al., 2023</xref>), a generic and scalable system for automated insect identification remains out of reach. While there has been significant work on developing platforms for monitoring specific species (<xref ref-type="bibr" rid="B53">Ratnayake et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B44">Leopold and Jantsch, 2024</xref>) or functional groups such as pollinators or agricultural pests (<xref ref-type="bibr" rid="B50">Pegoraro et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B67">Teixeira et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B1">Alex et&#xa0;al., 2025</xref>), the few existing platforms aimed at cross-taxon insect monitoring are typically restricted to individual orders of insects such as Lepidoptera (<xref ref-type="bibr" rid="B33">Jain et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B57">Roy et&#xa0;al., 2024</xref>) and lack data at lower taxonomic levels (<xref ref-type="bibr" rid="B33">Jain et&#xa0;al., 2024</xref>), making it difficult to generalize data from these systems to ecosystems at large.</p>
<p>One approach that has shown promise for large scale, generalized insect monitoring is the incorporation of hierarchical information into classification systems (see, for example, <xref ref-type="bibr" rid="B7">Bjerge et&#xa0;al., 2023</xref>), which allows models to leverage morphological information from higher taxonomic levels to improve performance on the classification of genera or species. Recent work (<xref ref-type="bibr" rid="B4">Badirli et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B7">Bjerge et&#xa0;al., 2023</xref>) has demonstrated that hierarchical models can have high performance both on classifying known species from their training data and distinguishing these species from novel taxa, which may&#xa0;assist in insect monitoring in highly diverse ecosystems and&#xa0;allow these models to generalize across heterogeneous sites without suffering from misidentifications. However, while existing hierarchical classifiers perform well at identifying taxa not in their training data, they remain limited by available training images with high resolution taxonomic data and in some cases require non-image data such as DNA to operate effectively (<xref ref-type="bibr" rid="B4">Badirli et&#xa0;al., 2023</xref>).</p>
<p>We address these issues by developing BugNet, a system of computer vision models designed to avoid misidentifications of unknown taxa by integrating image annotations across taxonomic levels. To allow these models to operate more accurately in ecosystems where large proportions of taxa are unknown or cannot reasonably be identified from images, we leverage the fact that most online insect image datasets lack species-level annotations: by treating missing labels as information during training, models learn to avoid assigning overly specific IDs to uncertain taxa. We apply this technique via a hierarchical classification system, which both improves model performance at low taxonomic levels and allows models to assign classifications at higher taxonomic levels when an insect cannot be classified to species or genus. Additionally, we developed a software ecosystem that allows rapid annotation of image data to train and validate localization and classification models, as well as a data pipeline that automates the scraping of image data from online sources and formats it for model training.</p>
</sec>
<sec id="s2">
<title>Methods</title>
<sec id="s2_1">
<title>The BugNet annotation pipeline</title>
<p>We developed a custom pipeline for scraping and annotating insect images from online databases centered around three major components.</p>
<list list-type="simple">
<list-item>
<p>1) We built a custom annotation tool which allows for the extremely rapid manual annotation of bounding boxes, i.e. boxes that define the location and size of an insect, and categorical annotations of downloaded images. This software couples common operations, such as saving data&#xa0;and loading new images, into single actions which can be performed entirely with the mouse or keyboard. Keybindings are fully customizable, allowing users to adapt the software to their own workflows. The annotation software can be set to allow the annotation of bounding boxes, circles, and lines to provide support for multiple types of data which may be useful to associate with training images (<xref ref-type="fig" rid="f1"><bold>Figure&#xa0;1</bold></xref>). In practice, we have found that when annotating bounding boxes, insect images can be processed at approximately 3.1 seconds per image (1,160 images per hour), and when annotating categorical information, images can be processed at 0.6 seconds per image (6,000&#xa0;images per hour), allowing new data to be rapidly incorporated into training datasets. Annotated data is saved in a standardized JSON format which can be easily converted to COCO (<xref ref-type="bibr" rid="B45">Lin et&#xa0;al., 2015</xref>) or YOLO (<xref ref-type="bibr" rid="B35">Jocher et&#xa0;al., 2020</xref>) data formats.</p></list-item>
<list-item>
<p>2) We developed a data pipeline for rapidly scraping insect images from the Global Biodiversity Information Facility (GBIF) database. For a set of geocoordinates and a radius around those coordinates, all available insect observations are pulled from GBIF using the pygbif Python client (<xref ref-type="bibr" rid="B11">Chamberlain and Boettiger, 2017</xref>; see the sketch following this list). These observations are filtered against a user-defined list of taxa to include or avoid, and all appropriately licensed image media associated with the remaining observations are downloaded directly using image URIs associated with each GBIF observation. Images are only downloaded if licensed under CC licenses permitting derivative work. Full images from this process are passed through a binary image filter based on an EfficientNet-B0 classification model (<xref ref-type="bibr" rid="B66">Tan and Le, 2019</xref>), trained to remove images which do not include insects (e.g. images of field notes or host plants), images which do not include appropriate life stages (e.g. images of caterpillars and other larvae), and images which are not of sufficient quality to use for training new models (e.g. extremely under-exposed, over-exposed, and blurry images). This binary filter was trained on approximately 10,000 images manually annotated from a random sample of GBIF insect media. Filtered images are passed through a YOLO-V5-m localization model (<xref ref-type="table" rid="T1"><bold>Table&#xa0;1</bold></xref>, <xref ref-type="bibr" rid="B35">Jocher et&#xa0;al., 2020</xref>) trained on 250,000 images to localize and crop insects from images. Cropped insect images are passed through a final EfficientNet-B0 binary filter trained on 40,000 images to remove incorrectly cropped objects and other low-quality images. Because this system relies on expert annotations on GBIF images, only cropped insects from images that include a single detected insect are retained for training, as the taxonomic annotation cannot be determined when multiple insects are present in an image. Images are stored with all associated metadata from GBIF, including the full taxonomic information available below taxonomic class. If information is not known for a certain taxonomic level, that level and all lower taxonomic levels are saved as &#x201c;Unknown&#x201d;.</p></list-item>
<list-item>
<p>3) We created a taxonomic annotation tool which enables the rapid validation of existing data and annotation of new image data with taxa names following the GBIF taxonomic backbone (<xref ref-type="bibr" rid="B26">GBIF Backbone Taxonomy, 2025</xref>). Images are annotated in bulk, with up to 200 images annotated simultaneously per user input. Images with existing annotations can be filtered by taxonomic name and the level of taxonomic precision. Unannotated images can be visually compared to reference images for each taxon downloaded from GBIF to improve annotation accuracy (<xref ref-type="fig" rid="f2"><bold>Figure&#xa0;2</bold></xref>). Annotated data is saved in a standardized JSON format, which can easily incorporate new taxonomic annotations into existing training data or statistical analyses. In practice, we have found that trained entomologists can use this software to annotate 35 images per minute (2,100 images per hour) to the level of genus or lower. All annotation software was built using Godot v4.2.</p></list-item>
</list>
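<p>To make the scraping step in part 2 concrete, the following minimal sketch pulls insect observations and image URIs from GBIF using the pygbif client. The placeholder search polygon, the GBIF backbone taxon key for class Insecta (216), and the substring-based license check are illustrative assumptions rather than the exact BugNet query, which additionally applies the taxon, life-stage, and image-quality filters described above.</p>
<code language="python">from pygbif import occurrences as occ

# Search for insect occurrences with still images inside a region of
# interest, expressed as a WKT polygon around the user's coordinates.
results = occ.search(
    taxonKey=216,   # GBIF backbone key for class Insecta
    geometry="POLYGON((-84.1 10.4, -84.0 10.4, -84.0 10.5, -84.1 10.5, -84.1 10.4))",
    mediaType="StillImage",
    limit=300,
)

# Keep image URIs only when the record's CC license permits derivative
# work; this substring check is a simplification of the real license logic.
image_uris = []
for record in results["results"]:
    license_str = (record.get("license") or "").lower()
    if "-nd" in license_str:
        continue  # skip no-derivatives licenses
    for media in record.get("media", []):
        if media.get("identifier"):
            image_uris.append(media["identifier"])
</code>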
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Example view of insect annotation software. A field image of <italic>Trigona</italic> bees on a fruit bait at Iyarina Research Station, Ecuador, in the process of annotation with bounding boxes. Insects can be annotated using a simple user interface with bounding boxes, circles, lines, and text comments via mouse or keyboard.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-14-1750931-g001.tif">
<alt-text content-type="machine-generated">A screenshot of the graphical user interface provided by the BugNet Rapid Insect Annotation software. The software consists of a menu bar at the top of the window and two main central panels: the left panel displays file information, annotated comments, and system information. The right panel displays the image currently being annotated, an aggregation of bees in the genus Trigona feeding from a piece of cantaloupe. The bees are in the process of being annotated with bounding boxes. </alt-text>
</graphic></fig>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Definitions for machine learning approaches and common performance metrics.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">Term</th>
<th valign="middle" align="left">Definition</th>
<th valign="middle" align="left">References</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Balanced accuracy</td>
<td valign="middle" align="left">A measure of accuracy suited for datasets where some classes are greatly over or under represented. It is defined as the sum of the true positive and true negative rates divided by two. For classification with multiple output classes, each class is treated as a binary classification, and true and false positive rates are averaged across classes (See <xref ref-type="bibr" rid="B31">Hand and Till, 2001</xref>). Values range from zero (low performance) to one (high performance).</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B9">Brodersen et&#xa0;al., 2010</xref></td>
</tr>
<tr>
<td valign="top" align="left">F-score</td>
<td valign="middle" align="left">The harmonic mean of the precision and recall. Values range from zero (low performance) to one (high performance).</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B36">Jude Chukwura Obi, 2023</xref></td>
</tr>
<tr>
<td valign="top" align="left">Intersection Over Union (IOU)</td>
<td valign="middle" align="left">A measure of the degree of overlap between two sets or regions. For regions, it is defined as the area shared between two regions divided by the total area covered by those regions. Values range from zero (no overlap) to one (perfect overlap).</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B14">da F. Costa, 2021</xref></td>
</tr>
<tr>
<td valign="top" align="left">Localization</td>
<td valign="middle" align="left">The act of determining the location and shape of a foreground object in an image, without classification of other attributes.</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B54">Redmon et&#xa0;al., 2016</xref></td>
</tr>
<tr>
<td valign="top" align="left">Precision</td>
<td valign="middle" align="left">In classification tasks, the number of true positives divided by the total number of true and false positives (i.e. the proportion of positive IDs which are correct). Values range from zero (low performance) to one (high performance).</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B36">Jude Chukwura Obi, 2023</xref></td>
</tr>
<tr>
<td valign="top" align="left">Recall</td>
<td valign="middle" align="left">In classification tasks, the number of true positives divided by the total number of true positives and false negatives. Values range from zero (low performance) to one (high performance).</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B36">Jude Chukwura Obi, 2023</xref></td>
</tr>
<tr>
<td valign="top" align="left">Receiver Operating Characteristic (ROC)</td>
<td valign="middle" align="left">The curve defined by the relationship between the true positive rate of a dataset and the false positive rate at different confidence thresholds. The area under this curve is a measure of model performance, where zero indicates low performance and one indicates high performance.</td>
<td valign="middle" align="left"><xref ref-type="bibr" rid="B36">Jude Chukwura Obi, 2023</xref></td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Example view of insect taxonomic validation software. Field images of insects observed at La Selva Biological Station, Costa Rica, filtered to order level identifications. The images consist primarily of stingless bees in the genus <italic>Trigona</italic>. Insects can be bulk selected and lower-level taxonomic annotations applied through a drop-down menu.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-14-1750931-g002.tif">
<alt-text content-type="machine-generated">A screenshot of the graphical user interface provided by the BugNet Rapid Taxonomic Annotation software. The software consists of a menu bar at the top of the window and two main panels: The left panel displays a menu for searching for taxon names and functional buttons for saving data, filtering data, and paging through images. The right panel displays a grid of images of insects, primarily bees in the genus Trigona. The software is set to filter images to show only those identified to order, and the results for the term "Trigona" are displayed in the search menu. </alt-text>
</graphic></fig>
</sec>
<sec id="s2_2">
<title>The BugNet inference pipeline</title>
<p>BugNet follows a four-part data pipeline centered around a modular localization and hierarchical classification system. Insects are cropped from input images using a YOLO-V5-m (<xref ref-type="bibr" rid="B35">Jocher et&#xa0;al., 2020</xref>) localization model trained on 250,000 manually annotated insect images. This model is used for insect localization only before passing cropped insects to a separate model for classification. Although there are more recently developed models available in the YOLO family of architectures, we consistently find YOLO-V5 outperforms more recent models in localization-only tasks. The localization model can be run on images of any resolution, but expects insects to be between approximately 5x5 pixels and 800x800 pixels in size. As such, images where insects fall outside of this range may have to be rescaled before being processed. As insects attracted to light and chemical baits typically vary in size over three orders of magnitude (<xref ref-type="bibr" rid="B13">Chown and Gaston, 2010</xref>), the localization model is run twice on each input image: once at a resolution defined by the user, and once with the image resolution decreased by a factor of eight. Potential duplicate detections are filtered out by applying a size filter to each bounding box, ensuring that the user-defined resolution pass captures only small insects and the reduced-resolution pass captures only large insects. An adjustable confidence threshold is applied to remove low-probability detections from each image, and the remaining cropped insect images are scaled to 128x128 pixels before being passed to an EfficientNet-B5 classification model.</p>
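<p>A minimal sketch of this two-pass scheme is shown below. The generic detect() callable standing in for the YOLO-V5-m model, the OpenCV resizing, and the exact size and confidence cutoffs are assumptions of this sketch; in the pipeline these thresholds are user-adjustable parameters.</p>
<code language="python">import cv2

SMALL_MAX = 800     # assumed size cutoff (px) separating the two passes
CONF_THRESH = 0.5   # adjustable confidence threshold

def localize_two_pass(img, detect):
    """Run detection at full and 1/8 resolution, splitting boxes by size.

    `detect(image)` is a stand-in for the YOLO-V5-m model and is assumed
    to yield (x1, y1, x2, y2, conf) boxes in pixels of the given image.
    """
    h, w = img.shape[:2]
    boxes = []

    # Pass 1: user-defined resolution; keep only small insects.
    for (x1, y1, x2, y2, conf) in detect(img):
        if conf &gt;= CONF_THRESH and max(x2 - x1, y2 - y1) &lt;= SMALL_MAX:
            boxes.append((x1, y1, x2, y2, conf))

    # Pass 2: resolution reduced by a factor of eight; keep only insects
    # that are large in the original image.
    small = cv2.resize(img, (w // 8, h // 8))
    for (x1, y1, x2, y2, conf) in detect(small):
        x1, y1, x2, y2 = (v * 8 for v in (x1, y1, x2, y2))
        if conf &gt;= CONF_THRESH and max(x2 - x1, y2 - y1) &gt; SMALL_MAX:
            boxes.append((x1, y1, x2, y2, conf))

    return boxes
</code>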
<p>To incorporate information across multiple taxonomic levels, the classification model was modified to produce hierarchical outputs, allowing the simultaneous output of confidence scores for&#xa0;taxonomic order, family, genus, and species (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Figure S1</bold></xref>). Incorporation of hierarchical data into models has shown promise in systems where objects can be grouped taxonomically (<xref ref-type="bibr" rid="B55">Redmon and Farhadi, 2017</xref>), and more recently in insect systems (<xref ref-type="bibr" rid="B4">Badirli et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B7">Bjerge et&#xa0;al., 2023</xref>). The BugNet hierarchical classifier differs from these models by using a simplified architecture which only modifies the final layer of a pre-existing model, and by using a modified loss function which allows the model to incorporate images which lack annotations at certain taxonomic levels (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Figure S2</bold></xref>), i.e. the training data can contain images for which the species, genus, family, or order is unknown. Complete details on how this architecture was developed and the loss functions used to incorporate hierarchical information into the models can be found in the supplement. This approach allows the model to learn to directly discriminate known and unknown taxa by using missing labels as data to inform model training, without using <italic>post-hoc</italic> out-of-distribution tests (<xref ref-type="bibr" rid="B46">Liu et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B12">Chen et&#xa0;al., 2025</xref>) or anomaly detection (<xref ref-type="bibr" rid="B7">Bjerge et&#xa0;al., 2023</xref>; <xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Figures S3-S4</bold></xref>).</p>
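<p>As an illustration of this design, the sketch below attaches one linear head per taxonomic level to a shared feature vector and reserves class index zero at each level for &#x201c;Unknown&#x201d;, mirroring the coding of unlabeled images as zeros used in the large-scale test below. This is an approximation for exposition; the exact architecture and loss functions are given in the supplement.</p>
<code language="python">import torch.nn as nn
import torch.nn.functional as F

class HierarchicalHead(nn.Module):
    """Per-level classification heads on a shared backbone feature vector.

    Index 0 at each level is reserved for "Unknown", so images lacking a
    label at some level still provide a training signal at that level.
    """
    def __init__(self, n_features, n_orders, n_families, n_genera, n_species):
        super().__init__()
        self.heads = nn.ModuleDict({
            "order":   nn.Linear(n_features, n_orders + 1),
            "family":  nn.Linear(n_features, n_families + 1),
            "genus":   nn.Linear(n_features, n_genera + 1),
            "species": nn.Linear(n_features, n_species + 1),
        })

    def forward(self, features):
        return {level: head(features) for level, head in self.heads.items()}

def hierarchical_loss(logits, labels):
    """Sum of per-level cross-entropies; a label of 0 means "Unknown"."""
    return sum(F.cross_entropy(logits[level], labels[level])
               for level in logits)
</code>
<p>Under this coding, an image annotated only to family contributes a normal cross-entropy term at the order and family levels and an explicit &#x201c;Unknown&#x201d; term at the genus and species levels, which is what allows the model to learn to withhold overly specific identifications.</p>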
<p>Classified images are run through a binary filter based on an EfficientNet-B0 model trained on 56,000 field images to remove additional false positives. As cameras with fixed positions produce series of images which may capture the same insect multiple times, the cropped insects are clustered across frames using a networking approach. For each detected insect, the intersection over union (IOU; see <xref ref-type="table" rid="T1"><bold>Table&#xa0;1</bold></xref> for term definitions) between the insect&#x2019;s bounding box and all other predicted bounding boxes from the previous frame is calculated, and boxes with IOU greater than 0.6 are connected within a single sub-network. This process is repeated iteratively across all input images to build a network representing all groups of bounding boxes which have a high probability of representing the same insect over multiple frames, giving each group a unique ID. Cropped insect images can be saved with their associated ID and predicted taxon for later validation or statistical processing. All software is made available under the GNU Affero General Public License v3.0.</p>
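<p>The cross-frame grouping can be sketched as a union-find over per-frame bounding boxes, as below. The box tuple format and the union-find bookkeeping are conveniences of this sketch; the 0.6 IOU linking threshold follows the description above.</p>
<code language="python">def iou(a, b):
    """Intersection over union of two (x1, y1, x2, y2) boxes."""
    ix1, iy1 = max(a[0], b[0]), max(a[1], b[1])
    ix2, iy2 = min(a[2], b[2]), min(a[3], b[3])
    inter = max(0.0, ix2 - ix1) * max(0.0, iy2 - iy1)
    union = ((a[2] - a[0]) * (a[3] - a[1])
             + (b[2] - b[0]) * (b[3] - b[1]) - inter)
    return inter / union if union else 0.0

def track_ids(frames, thresh=0.6):
    """Group boxes across consecutive frames into shared insect IDs.

    `frames` is a list of per-frame box lists; boxes in adjacent frames
    whose IOU exceeds `thresh` are merged into one group (union-find).
    """
    parent = {}

    def find(key):
        while parent[key] != key:
            parent[key] = parent[parent[key]]  # path compression
            key = parent[key]
        return key

    for f, boxes in enumerate(frames):
        for i, box in enumerate(boxes):
            parent[(f, i)] = (f, i)
            if f == 0:
                continue
            for j, prev in enumerate(frames[f - 1]):
                if iou(box, prev) &gt; thresh:
                    parent[find((f, i))] = find((f - 1, j))

    # Map every (frame, index) box to its group's root, i.e. a unique ID.
    return {key: find(key) for key in parent}
</code>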
</sec>
<sec id="s2_3">
<title>Model training and validation</title>
<p>The YOLO-V5-m localization model used for both cropping of GBIF image data and field image inference was trained at a resolution of 832x832 pixels on a dataset of 250,000 manually annotated insect images (1&#x2013;108 insects per image, ~260,000 total) with 70% of these images used for training and 30% for validation. Insect images used for training this model were downloaded from GBIF and represent temperate and tropical species from 11 orders. To improve the model&#x2019;s capability to detect insects over large body size ranges, a random scale augmentation was applied to each image batch, where images were randomly scaled over the range of 50%&#x2013;150% of the input size. The standard PyTorch RandAugment image augmentation suite was applied on top of these augmentations. Model overfitting was monitored by comparing performance on the training and validation image sets in each epoch, and model performance during training was determined from precision, recall, and F-score metrics.</p>
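<p>As a sketch, this augmentation stack could be composed in torchvision as follows. Note that the scale jitter here is applied per image rather than per batch, and the parameters shown are assumptions apart from the 50%&#x2013;150% scale range named above.</p>
<code language="python">from torchvision import transforms

# Illustrative training-time augmentations: random 50-150% scale jitter
# followed by torchvision's standard RandAugment policy.
train_transforms = transforms.Compose([
    transforms.RandomAffine(degrees=0, scale=(0.5, 1.5)),
    transforms.RandAugment(),
    transforms.ToTensor(),
])
</code>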
<p>To understand model robustness to image distortions that might typically be encountered in the field, we tested the performance of the localization and classification models on augmented validation images. For each image, we tested salt-and-pepper noise fractions ranging from 5% to 60% of pixels in an image modified to all white or all black, exposures of 0.5x and 1.5x normal image brightness, and Gaussian blurs with 5x5, 7x7, and 11x11 kernels. We note that the highest levels of image noise and blur tested here fell well outside the range we would typically expect from field images, but this allowed us to capture the range of image distortions under which the models can still function. Salt-and-pepper noise was applied to images for a given noise fraction such that a) exactly that fraction of pixels would be modified, rounded up to the nearest pixel, and b) if there were an odd number of pixels to be modified, there would be exactly one extra white pixel. Brightness was modified linearly using the adjust_brightness() function from the torchvision.transforms.functional library. Gaussian blur was applied using the GaussianBlur() function from torchvision.transforms.v2, with standard deviations of 1, 2, and 5 pixels, respectively, for the three levels of blur. Augmentations were applied to images at their standard dimensions (ranging from 5 x 5 px to 6,000 x 4,000 px, mode resolution = 1024 x 1024 px) before being scaled to the size required by each model. Images for localization were scaled to 832 x 832 px and images for filtering and classification were scaled to 128 x 128 px. Model performance was evaluated on balanced accuracy, precision, recall, and F-score metrics across images with annotations at each taxonomic level. All computer vision models were trained on an NVIDIA GeForce RTX 3060-Ti GPU for 100 epochs.</p>
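<p>The distortion conventions above can be reproduced in a few lines of PyTorch. The salt-and-pepper helper below follows the rounding and extra-white-pixel rules exactly, while the brightness and blur calls use the torchvision functions named in the text; float CHW tensors in [0, 1] are an assumption of this sketch.</p>
<code language="python">import math
import torch
from torchvision.transforms import v2
from torchvision.transforms.functional import adjust_brightness

def salt_and_pepper(img, frac):
    """Replace exactly ceil(frac * n_pixels) pixels with white or black.

    Odd pixel counts receive one extra white (salt) pixel, matching the
    convention above. Assumes a float CHW tensor in [0, 1].
    """
    c, h, w = img.shape
    n = math.ceil(frac * h * w)
    n_salt = n - n // 2            # odd counts get the extra white pixel
    idx = torch.randperm(h * w)[:n]
    flat = img.clone().reshape(c, -1)
    flat[:, idx[:n_salt]] = 1.0    # salt (white)
    flat[:, idx[n_salt:]] = 0.0    # pepper (black)
    return flat.reshape(c, h, w)

img = torch.rand(3, 128, 128)
noisy = salt_and_pepper(img, 0.20)                         # 20% noise
brighter = adjust_brightness(img, 1.5)                     # 1.5x exposure
blurred = v2.GaussianBlur(kernel_size=11, sigma=5.0)(img)  # strongest blur
</code>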
</sec>
<sec id="s2_4">
<title>Large scale BugNet tests</title>
<p>For testing the model training and inference pipeline at larger scales, we trained an EfficientNet-B5 model under the Wide architecture, coding unlabeled images as zeros (see <xref ref-type="supplementary-material" rid="SM1"><bold>supplemental methods</bold></xref>), on approximately 1.12 million images of neotropical nocturnal insects sampled automatically from GBIF. We focus on tropical insects for two major reasons. First, while tropical forests contain the majority of the world&#x2019;s biodiversity, they are underrepresented in existing classification datasets (but see <xref ref-type="bibr" rid="B33">Jain et&#xa0;al., 2024</xref> and <xref ref-type="bibr" rid="B29">Gon&#xe7;alves-Souza et&#xa0;al., 2025</xref> as recent examples in these systems). Second, the high species richness of these ecosystems in combination with the large number of undiscovered species they contain make tropical systems ideal for testing the performance of classification models on known and unknown species. In the full dataset, 100% of images were labeled to the level of Order, 99.44% to Family, 78.57% to Genus, and 2.64% to Species. The training dataset contained 13 orders, 230 families, 915 genera, and 34 species. Due to the presence of families with no label at the genus level, and genera with no species-level labels, this represented a total of 1,127 unique taxa. The model was trained for 450 epochs following the methodologies established above.</p>
<p>To understand model behavior in field conditions, we ran the full model pipeline on images of nocturnal insects collected using camera traps in Costa Rica and Ecuador. Images were collected using 16MP Arducam board cameras at light traps at La Selva Biological Station, Costa Rica; Iyarina Research Station, Ecuador; and Yanayacu Research Station, Ecuador for four to eight nights at each site (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Table S1</bold></xref>). A subset of 5,000 images containing insects from these sites was selected for analysis, but was not otherwise curated before testing. Images were localized at a resolution of 4656 x 3496 px. Classified images were validated by a trained entomologist to the lowest taxonomic level that could reasonably be applied for any given image. If features necessary for identification were ambiguous at a certain taxonomic level, that level was left unannotated and a classification was applied at the next highest level. Model performance was evaluated on balanced accuracy, precision, recall, and F-score metrics on images with annotations at each taxonomic level. We additionally report these metrics on images which could not be confidently annotated. While these do not necessarily represent misidentifications, as a model ID may be correct even when an image cannot be confidently classified by a human, they do represent model behavior on images which received an unverifiable ID. All models were trained and tested using PyTorch with CUDA v11.7.</p>
</sec>
<sec id="s2_5">
<title>Statistics</title>
<p>Performance for the localization model was calculated by 1) merging predicted bounding boxes with an intersection over union (IOU; <xref ref-type="bibr" rid="B14">da F. Costa, 2021</xref>) greater than 0.25, 2) choosing the bounding box with the highest IOU over 0.25 as the prediction for a ground truth box, and 3) using standard binary methodologies for calculating balanced accuracy, precision, recall, F-score, and the area under the Receiver Operating Characteristic (ROC) curve (<xref ref-type="table" rid="T1"><bold>Table&#xa0;1</bold></xref>). Performance for the hierarchical classifier was calculated using standard microaveraged techniques (i.e. by averaging across all samples without weighting by taxon) for balanced accuracy, precision, recall, and F-score. These metrics were calculated after applying a 50% confidence threshold to estimate the ability of models to ignore unlabeled images. Multi-class area under the ROC curve was calculated following the method of <xref ref-type="bibr" rid="B31">Hand and Till (2001)</xref>.</p>
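<p>For concreteness, steps 2 and 3 of this matching procedure might look like the sketch below; the greedy matching order is one reasonable reading of the procedure, and the box-merging step 1 and ROC calculation are omitted.</p>
<code language="python">def iou(a, b):
    """Intersection over union of two (x1, y1, x2, y2) boxes."""
    ix1, iy1 = max(a[0], b[0]), max(a[1], b[1])
    ix2, iy2 = min(a[2], b[2]), min(a[3], b[3])
    inter = max(0.0, ix2 - ix1) * max(0.0, iy2 - iy1)
    union = ((a[2] - a[0]) * (a[3] - a[1])
             + (b[2] - b[0]) * (b[3] - b[1]) - inter)
    return inter / union if union else 0.0

def score_localization(pred_boxes, true_boxes, thresh=0.25):
    """Match predictions to ground truth by IOU and score the outcome.

    Each ground-truth box takes the unused prediction with the highest
    IOU above `thresh`; leftover predictions count as false positives
    and unmatched truths as false negatives.
    """
    used, tp = set(), 0
    for truth in true_boxes:
        best, best_iou = None, thresh
        for i, pred in enumerate(pred_boxes):
            score = iou(pred, truth)
            if i not in used and score &gt; best_iou:
                best, best_iou = i, score
        if best is not None:
            used.add(best)
            tp += 1
    fp = len(pred_boxes) - len(used)
    fn = len(true_boxes) - tp
    precision = tp / (tp + fp) if tp + fp else 0.0
    recall = tp / (tp + fn) if tp + fn else 0.0
    return precision, recall
</code>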
<p>Accuracy, precision, and recall were typically calculated based only on known taxa within any given taxonomic level, as a classification cannot reasonably be considered correct or incorrect when there is no ground truth to validate against. As classification models are frequently applied with confidence thresholds which remove low-confidence classifications, the inclusion of &#x201c;unknowns&#x201d; in model outputs should not strongly affect how these data are interpreted compared to traditional classification models, but the presence of unknowns in the training and test data will affect the interpretation of model performance on datasets where different proportions of the data are labeled. As such, these performance metrics may appear overinflated in this study compared to studies where all images are labeled, and direct comparisons of taxonomic levels become difficult as each taxonomic level in the data we present here has different proportions of labeled and unlabeled images. To help correct for this, model performance on the field dataset was calculated both on the entire dataset and on the subset of the data which could be confidently labeled, including performance metrics where &#x201c;unknowns&#x201d; are considered a separate class at each taxonomic level. The proportion of field images which received any classification was additionally calculated for the full dataset, to allow comparison to recall of known taxa. Unless stated otherwise, accuracy, precision, and recall should be interpreted as model performance only when taxa are known.</p>
<p>Model performance for each suite of augmentations was compared to baseline performance using beta regressions with a logit link function. As the quality of cropped insect images varied dramatically with insect size and resolution within each field image, we additionally fit generalized linear models with binomial distributions for each taxonomic level comparing model precision to image resolution for all known cropped field insects. All analyses were conducted in R version 4.2.1 (<xref ref-type="bibr" rid="B52">R: The R Project for Statistical Computing, 2025</xref>).</p>
</sec>
</sec>
<sec id="s3" sec-type="results">
<title>Results</title>
<sec id="s3_1">
<title>Localization and filter model performance</title>
<p>Localization and binary filter model performance were high on unaugmented images (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Tables S4, S5</bold></xref>). Area under the ROC was 97.85% for the localization model and 99.15% for the filter model on unaugmented images. Localization model accuracy was reduced by the addition of noise (pseudo&#x2010;R<sup>2</sup> = 0.79, P &lt; 0.001) and blur (pseudo&#x2010;R<sup>2</sup> = 0.78, P &lt; 0.001), but not by changes in brightness (pseudo&#x2010;R<sup>2</sup> = 0.32, P = 0.255). However, model accuracy only significantly differed from baseline performance when &gt; 40% of pixels were replaced with noise (pseudo&#x2010;R<sup>2</sup> = 0.24, P &lt; 0.001) and at the highest level of blur (pseudo&#x2010;R<sup>2</sup> = 0.04, P = 0.003; <xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Table S4</bold></xref>). The binary filter model was more dramatically impacted by noise and blur, with significantly reduced accuracy with &gt;10% of pixels replaced by noise (pseudo&#x2010;R<sup>2</sup> = 0.69, P &lt; 0.02) and at the two highest levels of blur (pseudo&#x2010;R<sup>2</sup> = 0.83, P &lt; 0.001; <xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Table S5</bold></xref>). As the binary filter acts to remove low-quality insect crops before classification, this decrease in accuracy represents insects which would not be passed to the hierarchical classifier due to noise or blur.</p>
</sec>
<sec id="s3_2">
<title>Classification model performance</title>
<p>The dataset scraped from GBIF using the BugNet pipeline consisted of 1.12 million images representing 13 orders, 230 families, 915 genera, and 34 species of insect, for a total of 1,127 unique taxa.</p>
<p>The number of training images available for each taxon was typically highest at the order level and lowest at the species level, with a median of 10,543 images in each order, 1,263 in each family, 520 in each genus, and 431 in each species. However, sample sizes were highly imbalanced across taxa, with the least represented order, Dermaptera, containing only 1,050 images and the most represented order, Lepidoptera, containing 263,071 images. Similarly, sample sizes ranged from 225 to 51,451 at the family level, 219 to 4,903 at the genus level, and 240 to 1,689 at the species level (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Figure S5A</bold></xref>).</p>
<p>Classification model performance was typically highest at the order level, followed by family, species, and genus. Model balanced accuracy was 98.6%, 94.6%, 85.7%, and 84.3% at the order through species level, respectively (<xref ref-type="table" rid="T2"><bold>Table&#xa0;2</bold></xref>). The area under the ROC was 93.50%, 88.39%, 82.27%, and 90.33% for each taxonomic level, respectively (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Table S6</bold></xref>). Model accuracy was typically highest for taxa in the order Lepidoptera, which comprised the majority of images in the training data, and the only orders with accuracy below 90% were the four least represented orders, Orthoptera, Blattodea, Mantodea, and Neuroptera, which together comprised less than 4% of the training database. Errors when classifying these orders were primarily misidentifications as Lepidoptera, likely due to bias introduced by imbalanced training data. Outside of these orders, errors tended to cluster within taxonomic groups. For example, while family-level accuracy in the order Ephemeroptera was only 63.2%, 83.9% of these misclassified images were still classified as the correct order (<xref ref-type="fig" rid="f3"><bold>Figure&#xa0;3</bold></xref>). Model accuracy was significantly reduced by the addition of noise (pseudo&#x2010;R<sup>2</sup> = 0.25, P &lt; 0.001) and image blur (pseudo&#x2010;R<sup>2</sup> = 0.23, P = 0.011), primarily driven by reduced accuracy at the highest level of noise (pseudo&#x2010;R<sup>2</sup> = 0.48, P = 0.002) and blur (pseudo&#x2010;R<sup>2</sup> = 0.42, P = 0.007). There was no effect of image brightness on model accuracy (pseudo&#x2010;R<sup>2</sup> &lt; 0.001, P = 0.91). Model performance did not significantly differ from baseline performance on any augmentation within standard levels expected in the field for precision (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Table S7</bold></xref>), recall (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Table S8</bold></xref>), or balanced accuracy (<xref ref-type="table" rid="T2"><bold>Table&#xa0;2</bold></xref>).</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Classification model accuracy across taxonomic levels with a confidence threshold of 50%.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">Augmentation</th>
<th valign="middle" align="left">Strength</th>
<th valign="middle" align="left">Order</th>
<th valign="middle" align="left">Family</th>
<th valign="middle" align="left">Genus</th>
<th valign="middle" align="left">Species</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">Unaugmented</td>
<td valign="middle" align="left">Normal</td>
<td valign="middle" align="right">98.6</td>
<td valign="middle" align="right"><bold>94.63*</bold></td>
<td valign="middle" align="right"><bold>85.68</bold></td>
<td valign="middle" align="right"><bold>84.33</bold></td>
</tr>
<tr>
<td valign="middle" rowspan="5" align="left">Salt and Pepper</td>
<td valign="middle" align="left">5%</td>
<td valign="middle" align="right">98.51</td>
<td valign="middle" align="right">93.76</td>
<td valign="middle" align="right">84.51</td>
<td valign="middle" align="right">84.05</td>
</tr>
<tr>
<td valign="middle" align="left">10%</td>
<td valign="middle" align="right">98.42</td>
<td valign="middle" align="right">92.97</td>
<td valign="middle" align="right">83.31</td>
<td valign="middle" align="right">83.92</td>
</tr>
<tr>
<td valign="middle" align="left">20%</td>
<td valign="middle" align="right">98.14</td>
<td valign="middle" align="right">91.29</td>
<td valign="middle" align="right">80.9</td>
<td valign="middle" align="right">83.77</td>
</tr>
<tr>
<td valign="middle" align="left">40%</td>
<td valign="middle" align="right">96.67</td>
<td valign="middle" align="right">85.74</td>
<td valign="middle" align="right">73.75</td>
<td valign="middle" align="right">80.71</td>
</tr>
<tr>
<td valign="middle" align="left">60%</td>
<td valign="middle" align="right">88.69</td>
<td valign="middle" align="right">69.76</td>
<td valign="middle" align="right">59.18</td>
<td valign="middle" align="right">68.85</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="left">Brightness</td>
<td valign="middle" align="left">1.5x</td>
<td valign="middle" align="right">98.5</td>
<td valign="middle" align="right">93.83</td>
<td valign="middle" align="right">84.49</td>
<td valign="middle" align="right">83.76</td>
</tr>
<tr>
<td valign="middle" align="left">0.5x</td>
<td valign="middle" align="right"><bold>98.65</bold></td>
<td valign="middle" align="right">94.45</td>
<td valign="middle" align="right">85.33</td>
<td valign="middle" align="right">84.23</td>
</tr>
<tr>
<td valign="middle" rowspan="3" align="left">Gaussian Blur</td>
<td valign="middle" align="left">5&#xd7;5</td>
<td valign="middle" align="right">98.5</td>
<td valign="middle" align="right">93.09</td>
<td valign="middle" align="right">82.96</td>
<td valign="middle" align="right">83.43</td>
</tr>
<tr>
<td valign="middle" align="left">7&#xd7;7</td>
<td valign="middle" align="right">97.73</td>
<td valign="middle" align="right">88.98</td>
<td valign="middle" align="right">76.86</td>
<td valign="middle" align="right">81.04</td>
</tr>
<tr>
<td valign="middle" align="left">11&#xd7;11</td>
<td valign="middle" align="right">91.24</td>
<td valign="middle" align="right">72.25</td>
<td valign="middle" align="right">59.13</td>
<td valign="middle" align="right">67.15</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>*Bold values indicate highest performance for that metric across augmentation experiments.</p></fn>
</table-wrap-foot>
</table-wrap>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>Confusion matrices for each taxonomic level in the largest classification model. Tile colors indicate the proportion of images in the validation dataset predicted as a specific taxon for each ground-truth taxon. Values for Families, Genera, and Species have been aggregated within orders, with the final &#x201c;Within order error&#x201d; column indicating the proportion of images misidentified as another taxon within the correct order. Tiles representing &gt; 10% of images for a taxon have been labeled. Note that several orders lack any species due to the low percentage of images annotated to the species level.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-14-1750931-g003.tif">
<alt-text content-type="machine-generated">Four panels showing confusion matrices for the largest classification model tested. The panels show the matrices for taxonomic order, family, genus, and species, aggregated to the level of order. The diagonal of each matrix contains the highest valued cells, ranging from 55.6% to 99.5% at the order level, 63.2% to 96.9% at the family level, 40.8% to 98.4% at the genus level, and 59.2% to 97.9 at the species level. The majority of cells in the species level matrix contain no data. The majority of high value cells outside of the diagonal fall in the column representing Lepidoptera and the column representing "within order errors". </alt-text>
</graphic></fig>
</sec>
<sec id="s3_3">
<title>Field performance</title>
<p>The localization model was able to detect a total of 13,739 insects in the 5,000 field images tested. From these images, a total of 12 orders, 56 families, 35 genera, and 3 species were manually identified. There was considerable variation in the number of images identified for each taxon in each taxonomic level (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Figure S5B</bold></xref>), with the least represented orders, families, genera, and species consisting of only one image, while the most represented order, Lepidoptera (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Figures S6A-D</bold></xref>), was identified from 6,743 images. The most represented family, Erebidae (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Figures S6B-D</bold></xref>), was identified from 3,356 images, and the most represented genus, <italic>Amastus</italic> (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Figure S6D</bold></xref>), was identified from 719 images. Identification of insect taxa and especially species was limited in part by cryptic taxa which cannot be identified without microscopy or dissection, but was also affected by low image quality (<xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Figure S6</bold></xref>) and the abundance of extremely small insects. For example, the smallest insects which could consistently be identified below order were two to four millimeters in length (e.g. <xref ref-type="supplementary-material" rid="SM1"><bold>Supplementary Figure S6J</bold></xref>), but approximately 21% of insects cropped from field images were smaller than this length (<xref ref-type="fig" rid="f4"><bold>Figure&#xa0;4</bold></xref>).</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Classification model performance compared to resolution of cropped insects. Frequency of insect crop resolutions observed across field images (upper panel) and curves of best fit from binomial regression of model precision on known taxa against crop resolution (lower panel).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-14-1750931-g004.tif">
<alt-text content-type="machine-generated">Two panels showing the relationship between observed insect size and classification model precision. The top panel displays a density plot of insect sizes that is heavily skewed to small insects (less than 100 pixels to a side) with a hump around 400 pixels and a tail reaching 1700 pixels. The lower panel displays sigmoid curves representing increasing precision as insect resolution increases. Precision is highest at the level of taxonomic order, which is near 100% regardless of insect size, followed by family, genus, and species. Genus level precision increases rapidly and approaches 100% as insects exceed 600 pixels to a side. Species level performance approaches 100% as insects exceed 1200 pixels. </alt-text>
</graphic></fig>
<p>On images where taxa could be manually identified, model balanced accuracy was 98.3%, 96.2%, 93.3%, and 79.2% at the order through species levels, respectively. Model recall was 97.0%, 92.7%, 86.8%, and 58.5% at the order through species levels (<xref ref-type="table" rid="T3"><bold>Table&#xa0;3</bold></xref>). Species classifications could not be confidently applied by a human in 99.52% of images, but the model rarely attempted to classify these images: species predictions were applied to unknown insects in 0.67% of cases, and in images where the genus could be confidently identified but the species was unknown, species predictions were applied 2.3% of the time. Model precision was significantly affected by the resolution of detected insects at the level of species (&#x3c7;<sup>2</sup><sub>1,129</sub> = 6.29, P = 0.033), genus (&#x3c7;<sup>2</sup><sub>1,3272</sub> = 979.7, P &lt; 0.001), and family (&#x3c7;<sup>2</sup><sub>1,8092</sub> = 17.2, P &lt; 0.001), but not order (&#x3c7;<sup>2</sup><sub>1,13186</sub> = 2.5, P = 0.13). Model precision was low below the median insect size observed in field images, but at the genus level precision approached 100% for insects larger than 500 &#xd7; 500 px, approximately 3.25 &#xd7; 3.25 cm at the typical field of view used by the cameras (<xref ref-type="fig" rid="f4"><bold>Figure&#xa0;4</bold></xref>). This increase in precision closely mirrored the increase in the proportion of taxa that could be confidently identified by eye as insect size increased. Although species-level precision was low across observed insect sizes (<xref ref-type="table" rid="T3"><bold>Table&#xa0;3</bold></xref>), we note that only two of the species present in the training set, <italic>Samea ecclesialis</italic> and <italic>Pantherodes conglomerata</italic>, were observed in field images. Precision on these taxa was 75% and 85.4%, respectively.</p>
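<p>For readers wishing to reproduce this style of analysis, the relationship between crop resolution and classification success can be fit with a standard binomial regression. The sketch below is a minimal illustration under stated assumptions: the per-crop table, its column names, and the toy values are hypothetical rather than our analysis code, although the pixel-to-centimeter conversion follows the reported field of view (500 px corresponding to approximately 3.25 cm).</p>
<code language="python"># Minimal sketch of a precision-vs-resolution analysis (illustrative only).
import pandas as pd
import statsmodels.api as sm
import statsmodels.formula.api as smf

# Hypothetical per-crop table: "correct" is 1 if the model's prediction at a
# given taxonomic level matched the manual identification, 0 otherwise;
# "resolution_px" is the side length of the cropped insect in pixels.
crops = pd.DataFrame({
    "correct":       [1,   0,  1,   1,   1,   0,   1,   1],
    "resolution_px": [620, 90, 540, 710, 130, 480, 800, 950],
})

# Binomial GLM of classification success on crop resolution; the fitted
# probabilities trace a sigmoid like the curves in Figure 4 (lower panel).
fit = smf.glm("correct ~ resolution_px", data=crops,
              family=sm.families.Binomial()).fit()
print(fit.summary())

# Converting pixels to physical size at the reported field of view:
# 500 px is roughly 3.25 cm, i.e. about 154 px per centimeter.
px_per_cm = 500 / 3.25
</code>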
<table-wrap id="T3" position="float">
<label>Table&#xa0;3</label>
<caption>
<p>Model performance on insect taxa from field images at a 50% confidence threshold.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">Taxonomic level</th>
<th valign="middle" align="left">Known taxa<sup>*</sup></th>
<th valign="middle" align="left">Percent classified</th>
<th valign="middle" align="left">Taxon type</th>
<th valign="middle" align="left">Accuracy<sup>&#x2021;</sup></th>
<th valign="middle" align="left">Precision</th>
<th valign="middle" align="left">Recall</th>
<th valign="middle" align="left">F-score</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" rowspan="2" align="left">Order</td>
<td valign="middle" rowspan="2" align="left">12</td>
<td valign="middle" rowspan="2" align="left">95.98%</td>
<td valign="middle" align="left">Predicted</td>
<td valign="middle" align="right">98.38</td>
<td valign="middle" align="right">97.11</td>
<td valign="middle" align="right">96.99</td>
<td valign="middle" align="right">97.05</td>
</tr>
<tr>
<td valign="middle" align="left">Confirmed</td>
<td valign="middle" align="right">98.36</td>
<td valign="middle" align="right">97.21</td>
<td valign="middle" align="right">96.97</td>
<td valign="middle" align="right">97.09</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="left">Family</td>
<td valign="middle" rowspan="2" align="left">55</td>
<td valign="middle" rowspan="2" align="left">58.91%</td>
<td valign="middle" align="left">Predicted</td>
<td valign="middle" align="right">96.94</td>
<td valign="middle" align="right">82.03</td>
<td valign="middle" align="right">93.98</td>
<td valign="middle" align="right">87.60</td>
</tr>
<tr>
<td valign="middle" align="left">Confirmed</td>
<td valign="middle" align="right">96.26</td>
<td valign="middle" align="right">84.99</td>
<td valign="middle" align="right">92.68</td>
<td valign="middle" align="right">88.67</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="left">Genus</td>
<td valign="middle" rowspan="2" align="left">35</td>
<td valign="middle" rowspan="2" align="left">23.82%</td>
<td valign="middle" align="left">Predicted</td>
<td valign="middle" align="right">95.14</td>
<td valign="middle" align="right">67.03</td>
<td valign="middle" align="right">90.36</td>
<td valign="middle" align="right">76.96</td>
</tr>
<tr>
<td valign="middle" align="left">Confirmed</td>
<td valign="middle" align="right">93.32</td>
<td valign="middle" align="right">72.64</td>
<td valign="middle" align="right">86.82</td>
<td valign="middle" align="right">79.1</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="left">Species</td>
<td valign="middle" rowspan="2" align="left">2</td>
<td valign="middle" rowspan="2" align="left">0.95%</td>
<td valign="middle" align="left">Predicted</td>
<td valign="middle" align="right">79.21</td>
<td valign="middle" align="right">29.23</td>
<td valign="middle" align="right">58.41</td>
<td valign="middle" align="right">38.97</td>
</tr>
<tr>
<td valign="middle" align="left">Confirmed</td>
<td valign="middle" align="right">79.22</td>
<td valign="middle" align="right">84.44</td>
<td valign="middle" align="right">58.56</td>
<td valign="middle" align="right">69.09</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>*Number of taxa observed in the field which were also included in the model&#x2019;s training data.</p></fn>
<fn>
<p>&#x2021;Balanced accuracy across all images.</p></fn>
<fn>
<p>Performance metrics are reported separately for all taxa that were assigned a prediction (&#x201c;Predicted&#x201d;) and for the subset of taxa that could be identified by a human (&#x201c;Confirmed&#x201d;).</p></fn>
</table-wrap-foot>
</table-wrap>
<p>Additional results, including an investigation of how the models handle &#x201c;unknown&#x201d; taxa that were not part of the training dataset, can be found in the Supplementary Material.</p>
</sec>
</sec>
<sec id="s4" sec-type="discussion">
<title>Discussion</title>
<p>Our results demonstrate that the BugNet pipeline can be used to create accurate insect classification models with minimal human labor. The inherently hierarchical nature of the models produced using this pipeline allows the incorporation of unlabeled data into training image sets and enables classification across multiple taxonomic levels, even in systems where a majority of the taxa present are unknown. The largest model tested had high accuracy when identifying insects at the species level and distinguishing between known and unknown species in the GBIF validation data, despite a lack of species annotations for more than 97% of the training images. Precision was lower at the species level in field images, primarily driven by the presence of small insects. Model accuracy was high across taxonomic levels, and the model rarely predicted classifications in circumstances where manual identification could not be confidently applied. We note that, because unlabeled images were removed from the ground-truth data before some analyses, model performance at classifying all insects photographed in the field is difficult to interpret, and some metrics such as recall may be inflated when considering only known taxa. When including all 13,739 insects photographed in the field, regardless of whether they could be identified manually, the classification model assigned orders and families to the majority of images but considered the majority of genera and species unknown, roughly mirroring human behavior when manually identifying the same insects. While this behavior may make data interpretation more challenging and may be less suitable for certain monitoring tasks, the ability of automated systems to confidently classify insects as unknown taxa is necessary when operating in systems where the majority of taxa cannot be identified, due to the presence of minute, cryptic, or undescribed species.</p>
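<p>The ability to report a taxon as unknown, noted above, can be illustrated with a simple per-level confidence threshold matching the 50% threshold used for the metrics in <xref ref-type="table" rid="T3"><bold>Table&#xa0;3</bold></xref>. The sketch below is a hedged illustration of that decision rule, not the pipeline&#x2019;s implementation, and the example probabilities are invented.</p>
<code language="python"># Illustrative per-level "unknown" thresholding (not BugNet's actual code).
CONFIDENCE_THRESHOLD = 0.50  # threshold used for the Table 3 metrics

def classify_hierarchically(level_probs):
    """level_probs maps each taxonomic level to {taxon: probability}.
    Returns the most probable taxon per level, or "unknown" when no
    taxon reaches the confidence threshold."""
    predictions = {}
    for level, probs in level_probs.items():
        taxon, p = max(probs.items(), key=lambda kv: kv[1])
        predictions[level] = taxon if p >= CONFIDENCE_THRESHOLD else "unknown"
    return predictions

# Example: confident at order and family, uncertain at genus and species,
# mirroring the typical behavior reported for field images.
example = {
    "order":   {"Lepidoptera": 0.97, "Diptera": 0.02},
    "family":  {"Erebidae": 0.81, "Geometridae": 0.12},
    "genus":   {"Amastus": 0.42, "Hypercompe": 0.31},
    "species": {"Samea ecclesialis": 0.08},
}
print(classify_hierarchically(example))
# {'order': 'Lepidoptera', 'family': 'Erebidae', 'genus': 'unknown', 'species': 'unknown'}
</code>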
<p>Our model pipeline demonstrated high performance on known taxa in multiple field settings and enabled the rapid analysis and validation of field images with minimal effort. Validation of the 13,739 insects captured in the subset of camera images took a single researcher using our software approximately 6 hours. While these systems do not by themselves solve the issues plaguing automated monitoring of insects, we believe they are an important step on the path to highly scalable insect monitoring. By allowing computer vision models to better incorporate missing data, and by automating the scraping of insect image data from online repositories, these systems allow rapid training, deployment, and validation of new large-scale insect monitoring projects.</p>
<p>Despite the promise of these systems for lowering the barriers to automated insect monitoring at scale, they also have the potential to lower the barriers to producing low-quality data at scale. One danger of using an automated process for model development and biodiversity monitoring, such as the one described here, is the potential for models to be trained on insect images that do not match the types of insects expected to visit certain traps and baits. The ease with which automated systems allow new models to be trained and new data to be collected has the potential to enable users with limited knowledge of entomology, ecology, and field trapping methods to deploy models with no way of determining whether the assumptions underlying those models are ecologically valid. For example, a model used to detect insects at light traps that is trained naively on all available insect data for a region will invariably include diurnal insects, endoparasites, subterranean insects, and other insect groups that would not reasonably be expected to visit a light at night. As insect visitation to light traps can number in the tens of thousands of individuals per trap per night, even models with very high accuracy will produce large numbers of extreme misidentifications when misspecified in this manner. As such, the training and deployment of automated systems requires a great deal of ecological thought to be successful. Classification models inherently impose a perspective onto the data they process (<xref ref-type="bibr" rid="B79">Winner, 1980</xref>), and if the assumptions baked into a model cannot be trusted, then the output of the model cannot be trusted either.</p>
<p>Although we find that BugNet can be successfully used for automated monitoring of multiple insect types across various ecological contexts, it is worth noting that many of the insect diversity patterns we seek to describe as ecologists cannot be fully captured by image data alone, even with perfect systems. Camera traps focus on a particular slice of insect diversity: primarily adult insects (<xref ref-type="bibr" rid="B67">Teixeira et&#xa0;al., 2023</xref>), and only the taxa which can successfully be attracted to light traps (<xref ref-type="bibr" rid="B47">McDermott and Mullens, 2018</xref>) or chemical baits (<xref ref-type="bibr" rid="B15">Daterman, 1982</xref>). Insights gained from automated insect camera trapping cannot fully capture aspects of insect diversity that are more visible through traditional means, such as larval diversity (<xref ref-type="bibr" rid="B58">Salcido et&#xa0;al., 2020</xref>) or the diversity of parasitoids found within other insects (<xref ref-type="bibr" rid="B51">Quicke et&#xa0;al., 2024</xref>), and images alone neglect other dimensions of diversity which are known to have significant impacts on ecosystem function, such as intraspecific (<xref ref-type="bibr" rid="B42">Koricheva and Hayes, 2018</xref>; <xref ref-type="bibr" rid="B30">Grele et&#xa0;al., 2024</xref>), functional (<xref ref-type="bibr" rid="B17">Dyer, 2018</xref>), and interaction diversity (<xref ref-type="bibr" rid="B19">Dyer et&#xa0;al., 2010</xref>; <xref ref-type="bibr" rid="B17">Dyer, 2018</xref>). Although observing them requires a large amount of labor, these facets of biodiversity remain better captured through traditional means. Traditional monitoring methods also offer benefits that are lost with automation. The training and time spent by ecologists in the field allow researchers to build better intuitions about natural systems, observe unexpected behaviors, and develop new hypotheses (<xref ref-type="bibr" rid="B18">Dyer et&#xa0;al., 2024</xref>), and outputs from traditional ecological research often spill over into other fields, as when museum collections assembled during ecological studies enable research into taxonomy, evolution, and population genetics (<xref ref-type="bibr" rid="B48">Meineke and Davies, 2019</xref>; <xref ref-type="bibr" rid="B71">van Noort, 2024</xref>).</p>
<p>Despite these caveats, automated systems like BugNet have the potential to improve our ability to monitor insect behavior and community dynamics at scale when correctly applied. Human labor, while necessary for many forms of biodiversity monitoring, cannot easily scale to the level necessary to answer certain ecological and conservation questions. If biodiversity is a burning library (<xref ref-type="bibr" rid="B69">V&#xe4;liverronen and Hellsten, 2002</xref>), then automated systems, although they cannot tell us the contents of every book, can allow us to better monitor the scale and rate of loss these highly important systems are suffering, while freeing researchers to spend more time characterizing individual species and communities before they are lost. As insect losses due to human-driven change accelerate, we need tools that can scale to the degree necessary to monitor this loss. By lowering the barriers to creating and deploying these technologies, BugNet is an important step towards automated systems for monitoring patterns of insect abundance and diversity, improving our ability to understand the dynamics of insect systems and monitor their health.</p>
</sec>
</body>
<back>
<sec id="s5" sec-type="data-availability">
<title>Data availability statement</title>
<p>The datasets presented in this study can be found in online repositories. The names of the repository/repositories and accession number(s) can be found below: the pipeline code and data are archived at <uri xlink:href="https://zenodo.org/records/18625900">https://zenodo.org/records/18625900</uri>, and the data used in analyses are available from Dryad, <uri xlink:href="https://doi.org/10.5061/dryad.g1jwstr5f">https://doi.org/10.5061/dryad.g1jwstr5f</uri>.</p></sec>
<sec id="s6" sec-type="author-contributions">
<title>Author contributions</title>
<p>AG: Conceptualization, Data curation, Formal analysis, Investigation, Methodology, Project administration, Software, Validation, Visualization, Writing &#x2013; original draft, Writing &#x2013; review &amp; editing. LR: Conceptualization, Funding acquisition, Resources, Supervision, Writing &#x2013; review &amp; editing.</p></sec>
<ack>
<title>Acknowledgments</title>
<p>The authors would like to thank Dr. Lee Dyer for assistance and camaraderie in the field, as well as feedback on drafts; and Drs. Tom Walla and Harold Greeney for many conversations that informed the development of the data pipeline.</p>
</ack>
<sec id="s8" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p></sec>
<sec id="s9" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was not used in the creation of this manuscript.</p></sec>
<sec id="s10" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p></sec>
<sec id="s11" sec-type="supplementary-material">
<title>Supplementary material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fevo.2026.1750931/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fevo.2026.1750931/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="DataSheet1.pdf" id="SM1" mimetype="application/pdf"/></sec>
<ref-list>
<title>References</title>
<ref id="B1">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Alex</surname> <given-names>A. J.</given-names></name>
<name><surname>Barnes</surname> <given-names>C. M.</given-names></name>
<name><surname>Machado</surname> <given-names>P.</given-names></name>
<name><surname>Ihianle</surname> <given-names>I.</given-names></name>
<name><surname>Mark&#xf3;</surname> <given-names>G.</given-names></name>
<name><surname>Bencsik</surname> <given-names>M.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>). 
<article-title>Enhancing pollinator conservation: Monitoring of bees through object recognition</article-title>. <source>Comput. Electron. Agric.</source> <volume>228</volume>, <elocation-id>109665</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.109665</pub-id>
</mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Allen</surname> <given-names>C. E.</given-names></name>
<name><surname>Zwaan</surname> <given-names>B. J.</given-names></name>
<name><surname>Brakefield</surname> <given-names>P. M.</given-names></name>
</person-group> (<year>2011</year>). 
<article-title>Evolution of sexual dimorphism in the Lepidoptera</article-title>. <source>Annu. Rev. Entomol.</source> <volume>56</volume>, <fpage>445</fpage>&#x2013;<lpage>464</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1146/annurev-ento-120709-144828</pub-id>, PMID: <pub-id pub-id-type="pmid">20822452</pub-id>
</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Aman</surname> <given-names>A. S.</given-names></name>
<name><surname>Kumar</surname> <given-names>A.</given-names></name>
<name><surname>Mishra</surname> <given-names>P. K.</given-names></name>
<name><surname>Mishra</surname> <given-names>R.</given-names></name>
<name><surname>Tripathi</surname> <given-names>P.</given-names></name>
<name><surname>Rajpoot</surname> <given-names>P. K.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Modern technologies of insect trapping</article-title>. <source>Recent Trends Plant Prot.</source> <volume>15</volume>, <fpage>15</fpage>&#x2013;<lpage>25</lpage>.
</mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Badirli</surname> <given-names>S.</given-names></name>
<name><surname>Picard</surname> <given-names>C. J.</given-names></name>
<name><surname>Mohler</surname> <given-names>G.</given-names></name>
<name><surname>Richert</surname> <given-names>F.</given-names></name>
<name><surname>Akata</surname> <given-names>Z.</given-names></name>
<name><surname>Dundar</surname> <given-names>M.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Classifying the unknown: Insect identification with deep hierarchical Bayesian learning</article-title>. <source>Methods Ecol. Evol.</source> <volume>14</volume>, <fpage>1515</fpage>&#x2013;<lpage>1530</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/2041-210X.14104</pub-id>
</mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Barbosa</surname> <given-names>P.</given-names></name>
<name><surname>Letourneau</surname> <given-names>D. K.</given-names></name>
<name><surname>Agrawal</surname> <given-names>A. A.</given-names></name>
</person-group> (Eds.) (<year>2012</year>). <source>Insect Outbreaks Revisited</source>. <edition>1st Edn</edition> (<publisher-loc>Hoboken, NJ</publisher-loc>: 
<publisher-name>Wiley</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1002/9781118295205</pub-id>
</mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="web">
<person-group person-group-type="author">
<name><surname>Beuchert</surname> <given-names>J.</given-names></name>
<name><surname>Gifford</surname> <given-names>C.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Prototyping a Low-Cost, Low-Power System for Automated Monitoring of Nocturnal Insects</article-title>. Available online at: <uri xlink:href="https://prosquared.org/2024-LSS-Papers/2024-LSS-Beuchert-paper.pdf">https://prosquared.org/2024-LSS-Papers/2024-LSS-Beuchert-paper.pdf</uri> (Accessed <date-in-citation content-type="access-date">May 1, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bjerge</surname> <given-names>K.</given-names></name>
<name><surname>Geissmann</surname> <given-names>Q.</given-names></name>
<name><surname>Alison</surname> <given-names>J.</given-names></name>
<name><surname>Mann</surname> <given-names>H. M. R.</given-names></name>
<name><surname>H&#xf8;ye</surname> <given-names>T. T.</given-names></name>
<name><surname>Dyrmann</surname> <given-names>M.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Hierarchical classification of insects with multitask learning and anomaly detection</article-title>. <source>Ecol. Inf.</source> <volume>77</volume>, <elocation-id>102278</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ecoinf.2023.102278</pub-id>
</mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Boulent</surname> <given-names>J.</given-names></name>
<name><surname>Charry</surname> <given-names>B.</given-names></name>
<name><surname>Kennedy</surname> <given-names>M. M.</given-names></name>
<name><surname>Tissier</surname> <given-names>E.</given-names></name>
<name><surname>Fan</surname> <given-names>R.</given-names></name>
<name><surname>Marcoux</surname> <given-names>M.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Scaling whale monitoring using deep learning: A human-in-the-loop solution for analyzing aerial datasets</article-title>. <source>Front. Mar. Sci.</source> <volume>10</volume>, <elocation-id>1099479</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fmars.2023.1099479</pub-id>
</mixed-citation>
</ref>
<ref id="B9">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Brodersen</surname> <given-names>K. H.</given-names></name>
<name><surname>Ong</surname> <given-names>C. S.</given-names></name>
<name><surname>Stephan</surname> <given-names>K. E.</given-names></name>
<name><surname>Buhmann</surname> <given-names>J. M.</given-names></name>
</person-group> (<year>2010</year>). &#x201c;
<article-title>The balanced accuracy and its posterior distribution</article-title>,&#x201d; in <source>2010 20th International Conference on Pattern Recognition</source> (
<publisher-name>IEEE</publisher-name>, <publisher-loc>Istanbul, Turkey</publisher-loc>), <fpage>3121</fpage>&#x2013;<lpage>3124</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ICPR.2010.764</pub-id>
</mixed-citation>
</ref>
<ref id="B10">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Campbell</surname> <given-names>C. J.</given-names></name>
<name><surname>Barve</surname> <given-names>V.</given-names></name>
<name><surname>Belitz</surname> <given-names>M. W.</given-names></name>
<name><surname>Doby</surname> <given-names>J. R.</given-names></name>
<name><surname>White</surname> <given-names>E.</given-names></name>
<name><surname>Seltzer</surname> <given-names>C.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Identifying the identifiers: How iNaturalist facilitates collaborative, research-relevant data generation and why it matters for biodiversity science</article-title>. <source>BioScience</source> <volume>73</volume>, <fpage>533</fpage>&#x2013;<lpage>541</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/biosci/biad051</pub-id>
</mixed-citation>
</ref>
<ref id="B11">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chamberlain</surname> <given-names>S. A.</given-names></name>
<name><surname>Boettiger</surname> <given-names>C.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>R, Python, and Ruby clients for GBIF species occurrence data</article-title>. doi:&#xa0;<pub-id pub-id-type="doi">10.7287/peerj.preprints.3304v1</pub-id>
</mixed-citation>
</ref>
<ref id="B12">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Chen</surname> <given-names>Y.</given-names></name>
<name><surname>Lang</surname> <given-names>N.</given-names></name>
<name><surname>Schmidt</surname> <given-names>B. C.</given-names></name>
<name><surname>Jain</surname> <given-names>A.</given-names></name>
<name><surname>Basset</surname> <given-names>Y.</given-names></name>
<name><surname>Beery</surname> <given-names>S.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>). &#x201c;
<article-title>Open-insect: Benchmarking open-set recognition of novel species in biodiversity monitoring</article-title>,&#x201d; in <conf-name>The Thirty-ninth Annual Conference on Neural Information Processing Systems Datasets and Benchmarks Track</conf-name>.
</mixed-citation>
</ref>
<ref id="B13">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chown</surname> <given-names>S. L.</given-names></name>
<name><surname>Gaston</surname> <given-names>K. J.</given-names></name>
</person-group> (<year>2010</year>). 
<article-title>Body size variation in insects: a macroecological perspective</article-title>. <source>Biol. Rev.</source> <volume>85</volume>, <fpage>139</fpage>&#x2013;<lpage>169</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/j.1469-185x.2009.00097.x</pub-id>, PMID: <pub-id pub-id-type="pmid">20015316</pub-id>
</mixed-citation>
</ref>
<ref id="B14">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>da F. Costa</surname> <given-names>L.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Further generalizations of the Jaccard index</article-title>. <source>arXiv</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.2110.09619</pub-id>
</mixed-citation>
</ref>
<ref id="B15">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Daterman</surname> <given-names>G. E.</given-names></name>
</person-group> (<year>1982</year>). &#x201c;
<article-title>Monitoring insects with pheromones: trapping objectives and bait formulations</article-title>,&#x201d; in <source>Insect Suppression with Controlled Release Pheromone Systems</source> (<publisher-loc>Boca Raton, FL</publisher-loc>: 
<publisher-name>CRC Press</publisher-name>).
</mixed-citation>
</ref>
<ref id="B16">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Duffus</surname> <given-names>N. E.</given-names></name>
<name><surname>Christie</surname> <given-names>C. R.</given-names></name>
<name><surname>Morimoto</surname> <given-names>J.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Insect cultural services: how insects have changed our lives and how can we do better for them</article-title>. <source>Insects</source> <volume>12</volume>, <elocation-id>377</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/insects12050377</pub-id>, PMID: <pub-id pub-id-type="pmid">33921962</pub-id>
</mixed-citation>
</ref>
<ref id="B17">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dyer</surname> <given-names>L. A.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Multidimensional diversity associated with plants</article-title>. <source>Am. J. Bot.</source> <volume>105</volume>, <fpage>1439</fpage>&#x2013;<lpage>1442</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/ajb2.1147</pub-id>, PMID: <pub-id pub-id-type="pmid">30151878</pub-id>
</mixed-citation>
</ref>
<ref id="B18">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dyer</surname> <given-names>L. A.</given-names></name>
<name><surname>Smilanich</surname> <given-names>A. M.</given-names></name>
<name><surname>Gompert</surname> <given-names>Z.</given-names></name>
<name><surname>Forister</surname> <given-names>M. L.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Insect conservation, technological traps, and the fading arts of natural history and field ecology</article-title>. <source>Curr. Opin. Insect Sci.</source> <volume>66</volume>, <elocation-id>101261</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cois.2024.101261</pub-id>, PMID: <pub-id pub-id-type="pmid">39255900</pub-id>
</mixed-citation>
</ref>
<ref id="B19">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dyer</surname> <given-names>L. A.</given-names></name>
<name><surname>Walla</surname> <given-names>T. R.</given-names></name>
<name><surname>Greeney</surname> <given-names>H. F.</given-names></name>
<name><surname>Stireman</surname> <given-names>J. O.</given-names> <suffix>III</suffix></name>
<name><surname>Hazen</surname> <given-names>R. F.</given-names></name>
</person-group> (<year>2010</year>). 
<article-title>Diversity of interactions: A metric for studies of biodiversity</article-title>. <source>Biotropica</source> <volume>42</volume>, <fpage>281</fpage>&#x2013;<lpage>289</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/j.1744-7429.2009.00624.x</pub-id>
</mixed-citation>
</ref>
<ref id="B20">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Edwards</surname> <given-names>C. B.</given-names></name>
<name><surname>Zipkin</surname> <given-names>E. F.</given-names></name>
<name><surname>Henry</surname> <given-names>E. H.</given-names></name>
<name><surname>Haddad</surname> <given-names>N. M.</given-names></name>
<name><surname>Forister</surname> <given-names>M. L.</given-names></name>
<name><surname>Burls</surname> <given-names>K. J.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>). 
<article-title>Rapid butterfly declines across the United States during the 21st century</article-title>. <source>Science</source> <volume>387</volume>, <fpage>1090</fpage>&#x2013;<lpage>1094</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1126/science.adp4671</pub-id>, PMID: <pub-id pub-id-type="pmid">40048533</pub-id>
</mixed-citation>
</ref>
<ref id="B21">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Evans</surname> <given-names>L.</given-names></name>
<name><surname>Weinstein</surname> <given-names>P.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Begone from me, O crooked-lips! Integrated pest management in ancient Egypt</article-title>. <source>Am. Entomol.</source> <volume>67</volume>, <fpage>46</fpage>&#x2013;<lpage>53</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/ae/tmab010</pub-id>
</mixed-citation>
</ref>
<ref id="B22">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Fijen</surname> <given-names>T. P. M.</given-names></name>
<name><surname>Kleijn</surname> <given-names>D.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>How to efficiently obtain accurate estimates of flower visitation rates by pollinators</article-title>. <source>Basic. Appl. Ecol.</source> <volume>19</volume>, <fpage>11</fpage>&#x2013;<lpage>18</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.baae.2017.01.004</pub-id>
</mixed-citation>
</ref>
<ref id="B23">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Flint</surname> <given-names>M. L.</given-names></name>
<name><surname>van den Bosch</surname> <given-names>R.</given-names></name>
</person-group> (<year>1981</year>). &#x201c;
<article-title>A history of pest control</article-title>,&#x201d; in <source>Introduction to Integrated Pest Management</source>. Eds. 
<person-group person-group-type="editor">
<name><surname>Flint</surname> <given-names>M. L.</given-names></name>
<name><surname>van den Bosch</surname> <given-names>R.</given-names></name>
</person-group> (
<publisher-name>Springer US</publisher-name>, <publisher-loc>Boston, MA</publisher-loc>), <fpage>51</fpage>&#x2013;<lpage>81</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-1-4615-9212-9_4</pub-id>
</mixed-citation>
</ref>
<ref id="B24">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Fr&#xfc;nd</surname> <given-names>J.</given-names></name>
<name><surname>Linsenmair</surname> <given-names>K. E.</given-names></name>
<name><surname>Bl&#xfc;thgen</surname> <given-names>N.</given-names></name>
</person-group> (<year>2010</year>). 
<article-title>Pollinator diversity and specialization in relation to flower diversity</article-title>. <source>Oikos</source> <volume>119</volume>, <fpage>1581</fpage>&#x2013;<lpage>1590</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/j.1600-0706.2010.18450.x</pub-id>
</mixed-citation>
</ref>
<ref id="B25">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Fukunaga</surname> <given-names>K.</given-names></name>
<name><surname>Flick</surname> <given-names>T. E.</given-names></name>
</person-group> (<year>1984</year>). 
<article-title>Classification error for a very large number of classes</article-title>. <source>IEEE Trans. Pattern Anal. Mach. Intell.</source> <volume>PAMI-6</volume>, <fpage>779</fpage>&#x2013;<lpage>788</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/tpami.1984.4767601</pub-id>, PMID: <pub-id pub-id-type="pmid">22499658</pub-id>
</mixed-citation>
</ref>
<ref id="B26">
<mixed-citation publication-type="other">
<person-group person-group-type="author"><collab>GBIF Backbone Taxonomy</collab>
</person-group> (<year>2025</year>). doi:&#xa0;<pub-id pub-id-type="doi">10.15468/39omei</pub-id>
</mixed-citation>
</ref>
<ref id="B27">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Geissmann</surname> <given-names>Q.</given-names></name>
<name><surname>Abram</surname> <given-names>P. K.</given-names></name>
<name><surname>Wu</surname> <given-names>D.</given-names></name>
<name><surname>Haney</surname> <given-names>C. H.</given-names></name>
<name><surname>Carrillo</surname> <given-names>J.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Sticky Pi is a high-frequency smart trap that enables the study of insect circadian activity under natural conditions</article-title>. <source>PloS Biol.</source> <volume>20</volume>, <fpage>e3001689</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pbio.3001689</pub-id>, PMID: <pub-id pub-id-type="pmid">35797311</pub-id>
</mixed-citation>
</ref>
<ref id="B28">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gilpin</surname> <given-names>A.</given-names></name>
<name><surname>Denham</surname> <given-names>A. J.</given-names></name>
<name><surname>Ayre</surname> <given-names>D. J.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>The use of digital video recorders in pollination biology</article-title>. <source>Ecol. Entomol.</source> <volume>42</volume>, <fpage>383</fpage>&#x2013;<lpage>388</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/een.12394</pub-id>
</mixed-citation>
</ref>
<ref id="B29">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gon&#xe7;alves-Souza</surname> <given-names>T.</given-names></name>
<name><surname>Vancine</surname> <given-names>M. H.</given-names></name>
<name><surname>Paterno</surname> <given-names>G. B.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Computer vision uncovers trait-based insect responses to habitat loss</article-title>. <source>J. Anim. Ecol.</source> <volume>95</volume>, <elocation-id>e70165</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/1365-2656.70165</pub-id>, PMID: <pub-id pub-id-type="pmid">41128069</pub-id>
</mixed-citation>
</ref>
<ref id="B30">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Grele</surname> <given-names>A.</given-names></name>
<name><surname>Massad</surname> <given-names>T. J.</given-names></name>
<name><surname>Uckele</surname> <given-names>K. A.</given-names></name>
<name><surname>Dyer</surname> <given-names>L. A.</given-names></name>
<name><surname>Antonini</surname> <given-names>Y.</given-names></name>
<name><surname>Braga</surname> <given-names>L.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Intra- and interspecific diversity in a tropical plant clade alter herbivory and ecosystem resilience</article-title>. <source>eLife</source> <volume>12</volume>, <fpage>RP86988</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.7554/eLife.86988</pub-id>, PMID: <pub-id pub-id-type="pmid">38662411</pub-id>
</mixed-citation>
</ref>
<ref id="B31">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hand</surname> <given-names>D. J.</given-names></name>
<name><surname>Till</surname> <given-names>R. J.</given-names></name>
</person-group> (<year>2001</year>). 
<article-title>A simple generalisation of the area under the ROC curve for multiple class classification problems</article-title>. <source>Mach. Learn.</source> <volume>45</volume>, <fpage>171</fpage>&#x2013;<lpage>186</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1023/A:1010920819831</pub-id>
</mixed-citation>
</ref>
<ref id="B32">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Houadria</surname> <given-names>M.</given-names></name>
<name><surname>Bl&#xfc;thgen</surname> <given-names>N.</given-names></name>
<name><surname>Salas-Lopez</surname> <given-names>A.</given-names></name>
<name><surname>Schmitt</surname> <given-names>M.-I.</given-names></name>
<name><surname>Arndt</surname> <given-names>J.</given-names></name>
<name><surname>Schneider</surname> <given-names>E.</given-names></name>
<etal/>
</person-group>. (<year>2016</year>). 
<article-title>The relation between circadian asynchrony, functional redundancy, and trophic performance in tropical ant communities</article-title>. <source>Ecology</source> <volume>97</volume>, <fpage>225</fpage>&#x2013;<lpage>235</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1890/14-2466.1</pub-id>, PMID: <pub-id pub-id-type="pmid">27008791</pub-id>
</mixed-citation>
</ref>
<ref id="B33">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jain</surname> <given-names>A.</given-names></name>
<name><surname>Cunha</surname> <given-names>F.</given-names></name>
<name><surname>Bunsen</surname> <given-names>M. J.</given-names></name>
<name><surname>Ca&#xf1;as</surname> <given-names>J. S.</given-names></name>
<name><surname>Pasi</surname> <given-names>L.</given-names></name>
<name><surname>Pinoy</surname> <given-names>N.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Insect identification in the Wild: The AMI Dataset</article-title>. <source>arXiv</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.2406.12452</pub-id>
</mixed-citation>
</ref>
<ref id="B34">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Janzen</surname> <given-names>D.</given-names></name>
<name><surname>Hallwachs</surname> <given-names>W.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>How a tropical country can DNA barcode itself</article-title>. <source>iBOL. Barcode. Bull.</source> <volume>9</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.21083/ibol.v9i1.5526</pub-id>
</mixed-citation>
</ref>
<ref id="B35">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Jocher</surname> <given-names>G.</given-names></name>
<name><surname>Stoken</surname> <given-names>A.</given-names></name>
<name><surname>Borovec</surname> <given-names>J.</given-names></name>
<string-name>NanoCode012</string-name>
<string-name>ChristopherSTAN</string-name>
<name><surname>Liu</surname> <given-names>C.</given-names></name>
<etal/>
</person-group> (<year>2020</year>). <source>Ultralytics/yolov5: v3.0</source> (<publisher-loc>Geneva, CH</publisher-loc>: 
<publisher-name>Zenodo</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.5281/zenodo.3983579</pub-id>
</mixed-citation>
</ref>
<ref id="B36">
<mixed-citation publication-type="journal">
<person-group person-group-type="author"><collab>Jude Chukwura Obi</collab>
</person-group> (<year>2023</year>). 
<article-title>A comparative study of several classification metrics and their performances on data</article-title>. <source>World J. Adv. Eng. Technol. Sci.</source> <volume>8</volume>, <fpage>308</fpage>&#x2013;<lpage>314</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.30574/wjaets.2023.8.1.0054</pub-id>
</mixed-citation>
</ref>
<ref id="B37">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kahl</surname> <given-names>S.</given-names></name>
<name><surname>Wood</surname> <given-names>C. M.</given-names></name>
<name><surname>Eibl</surname> <given-names>M.</given-names></name>
<name><surname>Klinck</surname> <given-names>H.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>BirdNET: A deep learning solution for avian diversity monitoring</article-title>. <source>Ecol. Inf.</source> <volume>61</volume>, <elocation-id>101236</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ecoinf.2021.101236</pub-id>
</mixed-citation>
</ref>
<ref id="B38">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Keasar</surname> <given-names>T.</given-names></name>
<name><surname>Wajnberg</surname> <given-names>E.</given-names></name>
<name><surname>Heimpel</surname> <given-names>G.</given-names></name>
<name><surname>Hardy</surname> <given-names>I. C. W.</given-names></name>
<name><surname>Harpaz</surname> <given-names>L. S.</given-names></name>
<name><surname>Gottlieb</surname> <given-names>D.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Dynamic economic thresholds for insecticide applications against agricultural pests: importance of pest and natural enemy migration</article-title>. <source>J. Econ. Entomol.</source> <volume>116</volume>, <fpage>321</fpage>&#x2013;<lpage>330</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/jee/toad019</pub-id>, PMID: <pub-id pub-id-type="pmid">36791247</pub-id>
</mixed-citation>
</ref>
<ref id="B39">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Kelling</surname> <given-names>S.</given-names></name>
<name><surname>Gerbracht</surname> <given-names>J.</given-names></name>
<name><surname>Fink</surname> <given-names>D.</given-names></name>
<name><surname>Lagoze</surname> <given-names>C.</given-names></name>
<name><surname>Wong</surname> <given-names>W.-K.</given-names></name>
<name><surname>Yu</surname> <given-names>J. Y.</given-names></name>
<etal/>
</person-group>. (<year>2012</year>). &#x201c;
<article-title>eBird: A human/computer learning network for biodiversity conservation and research</article-title>,&#x201d; in <conf-name>Proceedings of the AAAI Conference on Artificial Intelligence</conf-name>. <volume>26</volume>, <fpage>2229</fpage>&#x2013;<lpage>2236</lpage>.
</mixed-citation>
</ref>
<ref id="B40">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kendall</surname> <given-names>L.</given-names></name>
<name><surname>Nicholson</surname> <given-names>C. C.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Pollination across the diel cycle: A global meta-analysis</article-title>. <source>Ecol. Lett.</source> <volume>28</volume>, <fpage>e70036</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/ele.70036</pub-id>, PMID: <pub-id pub-id-type="pmid">39737683</pub-id>
</mixed-citation>
</ref>
<ref id="B41">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kleijn</surname> <given-names>D.</given-names></name>
<name><surname>Winfree</surname> <given-names>R.</given-names></name>
<name><surname>Bartomeus</surname> <given-names>I.</given-names></name>
<name><surname>Carvalheiro</surname> <given-names>L. G.</given-names></name>
<name><surname>Henry</surname> <given-names>M.</given-names></name>
<name><surname>Isaacs</surname> <given-names>R.</given-names></name>
<etal/>
</person-group>. (<year>2015</year>). 
<article-title>Delivery of crop pollination services is an insufficient argument for wild pollinator conservation</article-title>. <source>Nat. Commun.</source> <volume>6</volume>, <fpage>7414</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/ncomms8414</pub-id>, PMID: <pub-id pub-id-type="pmid">26079893</pub-id>
</mixed-citation>
</ref>
<ref id="B42">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Koricheva</surname> <given-names>J.</given-names></name>
<name><surname>Hayes</surname> <given-names>D.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>The relative importance of plant intraspecific diversity in structuring arthropod communities: A meta-analysis</article-title>. <source>Funct. Ecol.</source> <volume>32</volume>, <fpage>1704</fpage>&#x2013;<lpage>1717</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/1365-2435.13062</pub-id>
</mixed-citation>
</ref>
<ref id="B43">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lawson</surname> <given-names>D. A.</given-names></name>
<name><surname>Rands</surname> <given-names>S. A.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>The effects of rainfall on plant&#x2013;pollinator interactions</article-title>. <source>Arthropod-Plant. Interact.</source> <volume>13</volume>, <fpage>561</fpage>&#x2013;<lpage>569</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11829-019-09686-z</pub-id>
</mixed-citation>
</ref>
<ref id="B44">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Leopold</surname> <given-names>T.</given-names></name>
<name><surname>Jantsch</surname> <given-names>A.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Colorado potato beetle dataset and detection for monitoring and management in potato fields</article-title>. <conf-name>Austrian Symposion on AI, Robotics and Vision</conf-name>, <conf-loc>Innsbruck, AU</conf-loc>.
</mixed-citation>
</ref>
<ref id="B45">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lin</surname> <given-names>T.-Y.</given-names></name>
<name><surname>Maire</surname> <given-names>M.</given-names></name>
<name><surname>Belongie</surname> <given-names>S.</given-names></name>
<name><surname>Bourdev</surname> <given-names>L.</given-names></name>
<name><surname>Girshick</surname> <given-names>R.</given-names></name>
<name><surname>Hays</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2015</year>). 
<article-title>Microsoft COCO: Common Objects in Context</article-title>. <source>arXiv</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.1405.0312</pub-id>
</mixed-citation>
</ref>
<ref id="B46">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liu</surname> <given-names>W.</given-names></name>
<name><surname>Wang</surname> <given-names>X.</given-names></name>
<name><surname>Owens</surname> <given-names>J.</given-names></name>
<name><surname>Li</surname> <given-names>Y.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Energy-based out-of-distribution detection</article-title>. <source>Adv. Neural Inf. Process. Syst.</source> <volume>33</volume>, <fpage>21464</fpage>&#x2013;<lpage>21475</lpage>.
</mixed-citation>
</ref>
<ref id="B47">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>McDermott</surname> <given-names>E. G.</given-names></name>
<name><surname>Mullens</surname> <given-names>B. A.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>The dark side of light traps</article-title>. <source>J. Med. Entomol.</source> <volume>55</volume>, <fpage>251</fpage>&#x2013;<lpage>261</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/jme/tjx207</pub-id>, PMID: <pub-id pub-id-type="pmid">29211869</pub-id>
</mixed-citation>
</ref>
<ref id="B48">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Meineke</surname> <given-names>E. K.</given-names></name>
<name><surname>Davies</surname> <given-names>T. J.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Museum specimens provide novel insights into changing plant&#x2013;herbivore interactions</article-title>. <source>Phil. Trans. R. Soc B.</source> <volume>374</volume>, <fpage>20170393</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1098/rstb.2017.0393</pub-id>, PMID: <pub-id pub-id-type="pmid">30455211</pub-id>
</mixed-citation>
</ref>
<ref id="B49">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Opp</surname> <given-names>S. B.</given-names></name>
<name><surname>Prokopy</surname> <given-names>R. J.</given-names></name>
</person-group> (<year>1986</year>). &#x201c;
<article-title>Approaches and methods for direct behavioral observation and analysis of plant-insect interactions</article-title>,&#x201d; in <source>Insect-Plant Interactions</source>. Eds. 
<person-group person-group-type="editor">
<name><surname>Miller</surname> <given-names>J. R.</given-names></name>
<name><surname>Miller</surname> <given-names>T. A.</given-names></name>
</person-group> (
<publisher-name>Springer</publisher-name>, <publisher-loc>New York, NY</publisher-loc>), <fpage>1</fpage>&#x2013;<lpage>22</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-1-4612-4910-8_1</pub-id>
</mixed-citation>
</ref>
<ref id="B50">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Pegoraro</surname> <given-names>L.</given-names></name>
<name><surname>Hidalgo</surname> <given-names>O.</given-names></name>
<name><surname>Leitch</surname> <given-names>I. J.</given-names></name>
<name><surname>Pellicer</surname> <given-names>J.</given-names></name>
<name><surname>Barlow</surname> <given-names>S. E.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Automated video monitoring of insect pollinators in the field</article-title>. <source>Emerging. Topics. Life Sci.</source> <volume>4</volume>, <fpage>87</fpage>&#x2013;<lpage>97</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1042/ETLS20190074</pub-id>, PMID: <pub-id pub-id-type="pmid">32558902</pub-id>
</mixed-citation>
</ref>
<ref id="B51">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Quicke</surname> <given-names>D. L. J.</given-names></name>
<name><surname>Janzen</surname> <given-names>D. H.</given-names></name>
<name><surname>Hallwachs</surname> <given-names>W.</given-names></name>
<name><surname>Sharkey</surname> <given-names>M. J.</given-names></name>
<name><surname>Hebert</surname> <given-names>P. D. N.</given-names></name>
<name><surname>Butcher</surname> <given-names>B. A.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Forty-five years of caterpillar rearing in &#xc1;rea de Conservaci&#xf3;n Guanacaste (ACG), northwestern Costa Rica: DNA barcodes, BINs, and a first description of plant&#x2013;caterpillar&#x2013;ichneumonoid interactions detected</article-title>. <source>Diversity</source> <volume>16</volume>, <elocation-id>683</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/d16110683</pub-id>
</mixed-citation>
</ref>
<ref id="B52">
<mixed-citation publication-type="web">
<person-group person-group-type="author"><collab>R: The R Project for Statistical Computing</collab>
</person-group> (<year>2025</year>). Available online at: <uri xlink:href="https://www.r-project.org/">https://www.r-project.org/</uri> (Accessed <date-in-citation content-type="access-date">April 28, 2025</date-in-citation>).
</mixed-citation>
</ref>
<ref id="B53">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ratnayake</surname> <given-names>M. N.</given-names></name>
<name><surname>Dyer</surname> <given-names>A. G.</given-names></name>
<name><surname>Dorin</surname> <given-names>A.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Tracking individual honeybees among wildflower clusters with computer vision-facilitated pollinator monitoring</article-title>. <source>PloS One</source> <volume>16</volume>, <fpage>e0239504</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0239504</pub-id>, PMID: <pub-id pub-id-type="pmid">33571210</pub-id>
</mixed-citation>
</ref>
<ref id="B54">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Redmon</surname> <given-names>J.</given-names></name>
<name><surname>Divvala</surname> <given-names>S.</given-names></name>
<name><surname>Girshick</surname> <given-names>R.</given-names></name>
<name><surname>Farhadi</surname> <given-names>A.</given-names></name>
</person-group> (<year>2016</year>). &#x201c;
<article-title>You only look once: unified, real-time object detection</article-title>,&#x201d; in <conf-name>2016 IEEE Conference on Computer Vision and Pattern Recognition (CVPR)</conf-name>, <conf-loc>Las Vegas, NV, USA</conf-loc>. <fpage>779</fpage>&#x2013;<lpage>788</lpage> (
<publisher-name>IEEE</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1109/CVPR.2016.91</pub-id>
</mixed-citation>
</ref>
<ref id="B55">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Redmon</surname> <given-names>J.</given-names></name>
<name><surname>Farhadi</surname> <given-names>A.</given-names></name>
</person-group> (<year>2017</year>). &#x201c;
<article-title>YOLO9000: better, faster, stronger</article-title>,&#x201d; in <conf-name>2017 IEEE Conference on Computer Vision and Pattern Recognition (CVPR)</conf-name>, <conf-loc>Honolulu, HI</conf-loc>. <fpage>6517</fpage>&#x2013;<lpage>6525</lpage> (
<publisher-name>IEEE</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1109/CVPR.2017.690</pub-id>
</mixed-citation>
</ref>
<ref id="B56">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Reyes-Gonz&#xe1;lez</surname> <given-names>R.</given-names></name>
<name><surname>Toledo-Hern&#xe1;ndez</surname> <given-names>V. H.</given-names></name>
<name><surname>Flores-Palacios</surname> <given-names>A.</given-names></name>
<name><surname>R&#xf6;s</surname> <given-names>M.</given-names></name>
<name><surname>Bueno-Villegas</surname> <given-names>J.</given-names></name>
<name><surname>Corona-L&#xf3;pez</surname> <given-names>A. M.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Exploring the proportion of rarity in tropical insects: evaluating hypotheses and variables</article-title>. <source>&#xc9;coscience</source> <volume>31</volume>, <fpage>229</fpage>&#x2013;<lpage>238</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/11956860.2024.2435133</pub-id>
</mixed-citation>
</ref>
<ref id="B57">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Roy</surname> <given-names>D. B.</given-names></name>
<name><surname>Alison</surname> <given-names>J.</given-names></name>
<name><surname>August</surname> <given-names>T. A.</given-names></name>
<name><surname>B&#xe9;lisle</surname> <given-names>M.</given-names></name>
<name><surname>Bjerge</surname> <given-names>K.</given-names></name>
<name><surname>Bowden</surname> <given-names>J. J.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Towards a standardized framework for AI-assisted, image-based monitoring of nocturnal insects</article-title>. <source>Phil. Trans. R. Soc. B</source> <volume>379</volume>, <fpage>20230108</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1098/rstb.2023.0108</pub-id>, PMID: <pub-id pub-id-type="pmid">38705190</pub-id>
</mixed-citation>
</ref>
<ref id="B58">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Salcido</surname> <given-names>D. M.</given-names></name>
<name><surname>Forister</surname> <given-names>M. L.</given-names></name>
<name><surname>Garcia Lopez</surname> <given-names>H.</given-names></name>
<name><surname>Dyer</surname> <given-names>L. A.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Loss of dominant caterpillar genera in a protected tropical forest</article-title>. <source>Sci. Rep.</source> <volume>10</volume>, <fpage>422</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-019-57226-9</pub-id>, PMID: <pub-id pub-id-type="pmid">31949238</pub-id>
</mixed-citation>
</ref>
<ref id="B59">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Scheffers</surname> <given-names>B. R.</given-names></name>
<name><surname>Joppa</surname> <given-names>L. N.</given-names></name>
<name><surname>Pimm</surname> <given-names>S. L.</given-names></name>
<name><surname>Laurance</surname> <given-names>W. F.</given-names></name>
</person-group> (<year>2012</year>). 
<article-title>What we know and don&#x2019;t know about Earth&#x2019;s missing biodiversity</article-title>. <source>Trends Ecol. Evol.</source> <volume>27</volume>, <fpage>501</fpage>&#x2013;<lpage>510</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.tree.2012.05.008</pub-id>, PMID: <pub-id pub-id-type="pmid">22784409</pub-id>
</mixed-citation>
</ref>
<ref id="B60">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Schneider</surname> <given-names>S.</given-names></name>
<name><surname>Taylor</surname> <given-names>G. W.</given-names></name>
<name><surname>Linquist</surname> <given-names>S.</given-names></name>
<name><surname>Kremer</surname> <given-names>S. C.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Past, present and future approaches using computer vision for animal re-identification from camera trap data</article-title>. <source>Methods Ecol. Evol.</source> <volume>10</volume>, <fpage>461</fpage>&#x2013;<lpage>470</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/2041-210X.13133</pub-id>
</mixed-citation>
</ref>
<ref id="B61">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Seymour</surname> <given-names>M.</given-names></name>
<name><surname>Roslin</surname> <given-names>T.</given-names></name>
<name><surname>deWaard</surname> <given-names>J. R.</given-names></name>
<name><surname>Perez</surname> <given-names>K. H. J.</given-names></name>
<name><surname>D&#x2019;Souza</surname> <given-names>M. L.</given-names></name>
<name><surname>Ratnasingham</surname> <given-names>S.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Global arthropod beta-diversity is spatially and temporally structured by latitude</article-title>. <source>Commun. Biol.</source> <volume>7</volume>, <fpage>1</fpage>&#x2013;<lpage>11</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s42003-024-06199-1</pub-id>, PMID: <pub-id pub-id-type="pmid">38720028</pub-id>
</mixed-citation>
</ref>
<ref id="B62">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Souza</surname> <given-names>C. S.</given-names></name>
<name><surname>Oliveira</surname> <given-names>P. E.</given-names></name>
<name><surname>Rosa</surname> <given-names>B. B.</given-names></name>
<name><surname>Maruyama</surname> <given-names>P. K.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Integrating nocturnal and diurnal interactions in a Neotropical pollination network</article-title>. <source>J. Ecol.</source> <volume>110</volume>, <fpage>2145</fpage>&#x2013;<lpage>2155</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/1365-2745.13937</pub-id>
</mixed-citation>
</ref>
<ref id="B63">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Stork</surname> <given-names>N. E.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>How many species of insects and other terrestrial arthropods are there on Earth</article-title>? <source>Annu. Rev. Entomol.</source> <volume>63</volume>, <fpage>31</fpage>&#x2013;<lpage>45</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1146/annurev-ento-020117-043348</pub-id>, PMID: <pub-id pub-id-type="pmid">28938083</pub-id>
</mixed-citation>
</ref>
<ref id="B64">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Roulston</surname> <given-names>T. H.</given-names></name>
<name><surname>Smith</surname> <given-names>S. A.</given-names></name>
<name><surname>Brewster</surname> <given-names>A. L.</given-names></name>
</person-group> (<year>2007</year>). 
<article-title>A comparison of pan trap and intensive net sampling techniques for documenting a bee (Hymenoptera: Apiformes) fauna</article-title>. <source>J. Kansas Entomol. Soc.</source> <volume>80</volume>, <fpage>179</fpage>&#x2013;<lpage>181</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2317/0022-8567(2007)80[179:ACOPTA]2.0.CO;2</pub-id>
</mixed-citation>
</ref>
<ref id="B65">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tamburini</surname> <given-names>G.</given-names></name>
<name><surname>Berti</surname> <given-names>A.</given-names></name>
<name><surname>Morari</surname> <given-names>F.</given-names></name>
<name><surname>Marini</surname> <given-names>L.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Degradation of soil fertility can cancel pollination benefits in sunflower</article-title>. <source>Oecologia</source> <volume>180</volume>, <fpage>581</fpage>&#x2013;<lpage>587</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00442-015-3493-1</pub-id>, PMID: <pub-id pub-id-type="pmid">26527463</pub-id>
</mixed-citation>
</ref>
<ref id="B66">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Tan</surname> <given-names>M.</given-names></name>
<name><surname>Le</surname> <given-names>Q. V.</given-names></name>
</person-group> (<year>2019</year>). &#x201c;
<article-title>EfficientNet: rethinking model scaling for convolutional neural networks</article-title>,&#x201d; in <conf-name>Proceedings of the 36th International Conference on Machine Learning</conf-name>, <conf-loc>Long Beach, CA</conf-loc>. <fpage>6105</fpage>&#x2013;<lpage>6114</lpage> (
<publisher-name>PMLR</publisher-name>).
</mixed-citation>
</ref>
<ref id="B67">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Teixeira</surname> <given-names>A. C.</given-names></name>
<name><surname>Ribeiro</surname> <given-names>J.</given-names></name>
<name><surname>Morais</surname> <given-names>R.</given-names></name>
<name><surname>Sousa</surname> <given-names>J. J.</given-names></name>
<name><surname>Cunha</surname> <given-names>A.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>A systematic review on automatic insect detection using deep learning</article-title>. <source>Agriculture</source> <volume>13</volume>, <fpage>713</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture13030713</pub-id>
</mixed-citation>
</ref>
<ref id="B68">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Truong</surname> <given-names>T.-D.</given-names></name>
<name><surname>Nguyen</surname> <given-names>H.-Q.</given-names></name>
<name><surname>Nguyen</surname> <given-names>X.-B.</given-names></name>
<name><surname>Dowling</surname> <given-names>A.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
<name><surname>Luu</surname> <given-names>K.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Insect-Foundation: a foundation model and large multimodal dataset for vision-language insect understanding</article-title>. <source>arXiv</source>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/arXiv.2502.09906</pub-id>
</mixed-citation>
</ref>
<ref id="B69">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>V&#xe4;liverronen</surname> <given-names>E.</given-names></name>
<name><surname>Hellsten</surname> <given-names>I.</given-names></name>
</person-group> (<year>2002</year>). 
<article-title>From &#x201c;Burning library&#x201d; to &#x201c;Green medicine&#x201d;: the role of metaphors in communicating biodiversity</article-title>. <source>Sci. Commun.</source> <volume>24</volume>, <fpage>229</fpage>&#x2013;<lpage>245</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1177/107554702237848</pub-id>
</mixed-citation>
</ref>
<ref id="B70">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Van Horn</surname> <given-names>G.</given-names></name>
<name><surname>Mac Aodha</surname> <given-names>O.</given-names></name>
<name><surname>Song</surname> <given-names>Y.</given-names></name>
<name><surname>Cui</surname> <given-names>Y.</given-names></name>
<name><surname>Sun</surname> <given-names>C.</given-names></name>
<name><surname>Shepard</surname> <given-names>A.</given-names></name>
<etal/>
</person-group>. (<year>2018</year>). &#x201c;
<article-title>The iNaturalist species classification and detection dataset</article-title>,&#x201d; in <conf-name>2018 IEEE/CVF Conference on Computer Vision and Pattern Recognition</conf-name>, <conf-loc>Salt Lake City, UT</conf-loc>. <fpage>8769</fpage>&#x2013;<lpage>8778</lpage> (
<publisher-name>IEEE</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1109/CVPR.2018.00914</pub-id>
</mixed-citation>
</ref>
<ref id="B71">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>van Noort</surname> <given-names>S.</given-names></name>
</person-group> (<year>2024</year>). &#x201c;
<article-title>The role of taxonomy and museums in insect conservation</article-title>,&#x201d; in <source>Routledge Handbook of Insect Conservation</source> (<publisher-loc>Milton Park, UK</publisher-loc>: 
<publisher-name>Routledge</publisher-name>).
</mixed-citation>
</ref>
<ref id="B72">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>V&#xe9;lez</surname> <given-names>J.</given-names></name>
<name><surname>McShea</surname> <given-names>W.</given-names></name>
<name><surname>Shamon</surname> <given-names>H.</given-names></name>
<name><surname>Castiblanco-Camacho</surname> <given-names>P. J.</given-names></name>
<name><surname>Tabak</surname> <given-names>M. A.</given-names></name>
<name><surname>Chalmers</surname> <given-names>C.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>An evaluation of platforms for processing camera-trap data using artificial intelligence</article-title>. <source>Methods Ecol. Evol.</source> <volume>14</volume>, <fpage>459</fpage>&#x2013;<lpage>477</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/2041-210X.14044</pub-id>
</mixed-citation>
</ref>
<ref id="B73">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Venjakob</surname> <given-names>C.</given-names></name>
<name><surname>Klein</surname> <given-names>A.-M.</given-names></name>
<name><surname>Ebeling</surname> <given-names>A.</given-names></name>
<name><surname>Tscharntke</surname> <given-names>T.</given-names></name>
<name><surname>Scherber</surname> <given-names>C.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Plant diversity increases spatio-temporal niche complementarity in plant-pollinator interactions</article-title>. <source>Ecol. Evol.</source> <volume>6</volume>, <fpage>2249</fpage>&#x2013;<lpage>2261</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/ece3.2026</pub-id>, PMID: <pub-id pub-id-type="pmid">27069585</pub-id>
</mixed-citation>
</ref>
<ref id="B74">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Vizentin-Bugoni</surname> <given-names>J.</given-names></name>
<name><surname>Maruyama</surname> <given-names>P. K.</given-names></name>
<name><surname>de Souza</surname> <given-names>C. S.</given-names></name>
<name><surname>Ollerton</surname> <given-names>J.</given-names></name>
<name><surname>Rech</surname> <given-names>A. R.</given-names></name>
<name><surname>Sazima</surname> <given-names>M.</given-names></name>
</person-group> (<year>2018</year>). &#x201c;
<article-title>Plant-pollinator networks in the tropics: a review</article-title>,&#x201d; in <source>Ecological networks in the tropics: An integrative overview of species interactions from some of the most species-rich habitats on Earth</source> (
<publisher-name>Springer International Publishing</publisher-name>, <publisher-loc>Cham</publisher-loc>), pp. <fpage>73</fpage>&#x2013;<lpage>91</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-3-319-68228-0_6</pub-id>.
</mixed-citation>
</ref>
<ref id="B75">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wagner</surname> <given-names>D. L.</given-names></name>
<name><surname>Grames</surname> <given-names>E. M.</given-names></name>
<name><surname>Forister</surname> <given-names>M. L.</given-names></name>
<name><surname>Berenbaum</surname> <given-names>M. R.</given-names></name>
<name><surname>Stopak</surname> <given-names>D.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Insect decline in the Anthropocene: Death by a thousand cuts</article-title>. <source>Proc. Natl. Acad. Sci.</source> <volume>118</volume>, <fpage>e2023989118</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1073/pnas.2023989118</pub-id>, PMID: <pub-id pub-id-type="pmid">33431573</pub-id>
</mixed-citation>
</ref>
<ref id="B76">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Weisser</surname> <given-names>W. W.</given-names></name>
<name><surname>Siemann</surname> <given-names>E.</given-names></name>
</person-group> (Eds.) (<year>2008</year>). <source>Insects and Ecosystem Function</source> (<publisher-loc>Berlin, Heidelberg</publisher-loc>: 
<publisher-name>Springer Berlin Heidelberg</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-3-540-74004-9</pub-id>
</mixed-citation>
</ref>
<ref id="B77">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wiegmann</surname> <given-names>B. M.</given-names></name>
<name><surname>Kim</surname> <given-names>J.</given-names></name>
<name><surname>Trautwein</surname> <given-names>M. D.</given-names></name>
</person-group> (<year>2009</year>). 
<article-title>Holometabolous insects (holometabola)</article-title>. <source>Timetree. Life</source> <volume>31</volume>, <fpage>260</fpage>&#x2013;<lpage>263</lpage>.
</mixed-citation>
</ref>
<ref id="B78">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Williams</surname> <given-names>K. S.</given-names></name>
<name><surname>Simon</surname> <given-names>C.</given-names></name>
</person-group> (<year>1995</year>). 
<article-title>The ecology, behavior, and evolution of periodical cicadas</article-title>. <source>Annu. Rev. Entomol.</source> <volume>40</volume>, <fpage>269</fpage>&#x2013;<lpage>295</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1146/annurev.en.40.010195.001413</pub-id>
</mixed-citation>
</ref>
<ref id="B79">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name><surname>Winner</surname> <given-names>L.</given-names></name>
</person-group> (<year>1980</year>). 
<article-title>Do artifacts have politics</article-title>? In <source>Computer Ethics</source> (<publisher-loc>Milton Park, UK</publisher-loc>: 
<publisher-name>Routledge</publisher-name>), pp. <fpage>177</fpage>&#x2013;<lpage>192</lpage>.
</mixed-citation>
</ref>
<ref id="B80">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wong</surname> <given-names>M. K. L.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Latitude shapes diel patterns in insect biodiversity</article-title>. <source>Biol. Lett.</source> <volume>21</volume>, <fpage>20240622</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1098/rsbl.2024.0622</pub-id>, PMID: <pub-id pub-id-type="pmid">40300633</pub-id>
</mixed-citation>
</ref>
<ref id="B81">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wong</surname> <given-names>M. K. L.</given-names></name>
<name><surname>Didham</surname> <given-names>R.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>A global dataset of diel activity patterns in insect communities</article-title>. <source>Sci. Data</source> <volume>11</volume>, <fpage>558</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41597-024-03408-8</pub-id>, PMID: <pub-id pub-id-type="pmid">38816416</pub-id>
</mixed-citation>
</ref>
<ref id="B82">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name><surname>Wu</surname> <given-names>X.</given-names></name>
<name><surname>Zhan</surname> <given-names>C.</given-names></name>
<name><surname>Lai</surname> <given-names>Y.-K.</given-names></name>
<name><surname>Cheng</surname> <given-names>M.-M.</given-names></name>
<name><surname>Yang</surname> <given-names>J.</given-names></name>
</person-group> (<year>2019</year>). &#x201c;
<article-title>IP102: A large-scale benchmark dataset for insect pest recognition</article-title>,&#x201d; in <conf-name>2019 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)</conf-name>, <conf-loc>Long Beach, CA, USA</conf-loc>. <fpage>8779</fpage>&#x2013;<lpage>8788</lpage> (
<publisher-name>IEEE</publisher-name>). doi:&#xa0;<pub-id pub-id-type="doi">10.1109/CVPR.2019.00899</pub-id>
</mixed-citation>
</ref>
<ref id="B83">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yang</surname> <given-names>L. H.</given-names></name>
<name><surname>Gratton</surname> <given-names>C.</given-names></name>
</person-group> (<year>2014</year>). 
<article-title>Insects as drivers of ecosystem processes</article-title>. <source>Curr. Opin. Insect Sci.</source> <volume>2</volume>, <fpage>26</fpage>&#x2013;<lpage>32</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cois.2014.06.004</pub-id>, PMID: <pub-id pub-id-type="pmid">32846721</pub-id>
</mixed-citation>
</ref>
<ref id="B84">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zaller</surname> <given-names>J. G.</given-names></name>
<name><surname>Kerschbaumer</surname> <given-names>G.</given-names></name>
<name><surname>Rizzoli</surname> <given-names>R.</given-names></name>
<name><surname>Tiefenbacher</surname> <given-names>A.</given-names></name>
<name><surname>Gruber</surname> <given-names>E.</given-names></name>
<name><surname>Schedl</surname> <given-names>H.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Monitoring arthropods in protected grasslands: comparing pitfall trapping, quadrat sampling and video monitoring</article-title>. <source>Web Ecol.</source> <volume>15</volume>, <fpage>15</fpage>&#x2013;<lpage>23</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/we-15-15-2015</pub-id>
</mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn id="n1" fn-type="custom" custom-type="edited-by">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/92625">St&#xe9;phane Joost</ext-link>, Swiss Federal Institute of Technology Lausanne, Switzerland</p></fn>
<fn id="n2" fn-type="custom" custom-type="reviewed-by">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/386664">Tamar Keasar</ext-link>, University of Haifa - Oranim, Israel</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3314507">Titus Venverloo</ext-link>, Massachusetts Institute of Technology, United States</p></fn>
</fn-group>
</back>
</article>