<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" dtd-version="1.3" article-type="editorial">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Comput. Neurosci.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Computational Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Comput. Neurosci.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1662-5188</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fncom.2026.1789388</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Editorial</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Editorial: Neuromorphic and deep learning paradigms for neural data interpretation and computational neuroscience</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Zou</surname> <given-names>Chenglong</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/1284215"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Yuan</surname> <given-names>Rui</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2978113"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x00026; editing</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Wen</surname> <given-names>Jun</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x00026; editing</role>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>School of Integrated Circuits, Beijing University of Posts and Telecommunications</institution>, <city>Beijing</city>, <country country="CN">China</country></aff>
<aff id="aff2"><label>2</label><institution>College of Computer Science, Chongqing University</institution>, <city>Chongqing</city>, <country country="CN">China</country></aff>
<aff id="aff3"><label>3</label><institution>Department of Neurosurgery, Xinqiao Hospital, Army Medical University</institution>, <city>Chongqing</city>, <country country="CN">China</country></aff>
<aff id="aff4"><label>4</label><institution>College of Artificial Intelligence, Southwest University</institution>, <city>Chongqing</city>, <country country="CN">China</country></aff>
<aff id="aff5"><label>5</label><institution>Department of Computational Biology, Mohamed bin Zayed University of Artificial Intelligence</institution>, <city>Abu Dhabi</city>, <country country="AE">United Arab Emirates</country></aff>
<author-notes>
<corresp id="c001"><label>&#x0002A;</label>Correspondence: Rui Yuan, <email xlink:href="mailto:yuanruiswu@swu.edu.cn">yuanruiswu@swu.edu.cn</email>; Jun Wen, <email xlink:href="mailto:jun.wen@mbzuai.ac.ae">jun.wen@mbzuai.ac.ae</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-13">
<day>13</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2026</year>
</pub-date>
<volume>20</volume>
<elocation-id>1789388</elocation-id>
<history>
<date date-type="received">
<day>16</day>
<month>01</month>
<year>2026</year>
</date>
<date date-type="accepted">
<day>21</day>
<month>01</month>
<year>2026</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x000A9; 2026 Zou, Yuan and Wen.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Zou, Yuan and Wen</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-13">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<kwd-group>
<kwd>brain-inspired computing</kwd>
<kwd>deep learning</kwd>
<kwd>neural data interpretation</kwd>
<kwd>neuromorphic computing</kwd>
<kwd>spiking neural networks (SNNs)</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. This work was supported by the National Key Research and Development Program of China (Grant No. 2024YFC2607404), the National Natural Science Foundation of China (Grant No. 62406260), and the Key Project of the Chongqing Natural Science Foundation Joint Fund (Grant No. CSTB2024NSCQ-LZX0087).</funding-statement>
</funding-group>
<counts>
<fig-count count="0"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="0"/>
<page-count count="2"/>
<word-count count="1063"/>
</counts>
</article-meta>
<notes notes-type="frontiers-research-topic">
<p><bold>Editorial on the Research Topic</bold> <ext-link xlink:href="https://www.frontiersin.org/research-topics/70003/neuromorphic-and-deep-learning-paradigms-for-neural-data-interpretation-and-computational-neuroscience" ext-link-type="uri">Neuromorphic and deep learning paradigms for neural data interpretation and computational neuroscience</ext-link></p></notes>
</front>
<body>
<p>Interdisciplinary research between neuromorphic and deep learning paradigms has developed rapidly in recent years. Key advances include silicon nano-devices and memristors for neuromorphic computing, brain-computer interfaces (BCIs), and high-performance SNN models for neural decoding. These technologies have significantly advanced the application of neuroscience discoveries in medicine and engineering. Based on this observation, we initiated a Research Topic and received a total of seven submissions. Finally, four distinguished articles were accepted for publication after rigorous peer review. We would like to express our sincere thanks and congratulations to all the authors and reviewers who collaborated on this Research Topic. The following contributions and highlights will be relevant to researchers in this field.</p>
<sec id="s1">
<title>Contribution 1: DT-SCNN: dual-threshold spiking convolutional neural network with fewer operations and memory access for edge applications</title>
<p>The article by <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fncom.2024.1418115">Lei et al.</ext-link> introduces a novel Dual-Threshold Spiking CNN (DT-SCNN) designed for edge applications. It uses a dual-threshold LIF neuron to generate two spiking feature maps from one membrane potential map, thereby halving the necessary operations, weights, and memory access. The results show a reduction of approximately 50% in convolutional operations with minimal accuracy loss (&#x0003C; 0.4%) on CIFAR10/MNIST/Fashion-MNIST, enabling efficient and low-latency edge deployment.</p></sec>
<sec id="s2">
<title>Contribution 2: Neuromorphic energy economics: toward biologically inspired and sustainable power market design</title>
<p><ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fncom.2025.1597038">Ye et al.</ext-link> propose a neuromorphic computing-inspired paradigm for sustainable power market design. The study advocates using event-driven SNNs for microsecond-scale, energy-efficient dynamic pricing and grid management. The authors suggest that enabling decentralized and self-organizing coordination of distributed energy resources could enhance grid resilience to renewable energy volatility.</p></sec>
<sec id="s3">
<title>Contribution 3: Triboelectric nanogenerators for neural data interpretation: bridging multi-sensing interfaces with neuromorphic and deep learning paradigms</title>
<p><ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fncom.2025.1691017">Gan et al.</ext-link> review Triboelectric Nanogenerators (TENGs) as self-powered, flexible multi-sensors for acquiring neural and physiological signals (e.g., EEG and EMG). The authors highlight the ability of deep learning models (e.g., CNNs and RNNs) to converge for data interpretation and the advantages of neuromorphic computing methods for ultra-low-power, event-driven processing. Finally, this synergy is positioned as crucial for advanced elderly health monitoring and brain-computer interfaces.</p></sec>
<sec id="s4">
<title>Contribution 4: Bridging neuromorphic computing and deep learning for next-generation neural data interpretation</title>
<p>The study by <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fncom.2025.1737839">Zhang et al.</ext-link> proposes a hybrid framework that integrates neuromorphic computing and deep learning for neural data interpretation. The authors argue that it is necessary to combine an event-driven, low-power neuromorphic front end (e.g., SNNs) for spike-based processing with a powerful deep learning back end for high-level pattern recognition. This work also emphasizes the importance of balancing biological plausibility, energy efficiency, and computational performance.</p>
<p>Overall, the contributions presented in these studies focus on the integration and synergy of deep learning and neuromorphic computing. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fncom.2024.1418115">Lei et al.</ext-link> presents a high-accuracy and low-cost SCNN for edge applications. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fncom.2025.1597038">Ye et al.</ext-link> demonstrates the potential of neuromorphic methods for self-organizing, decentralized, and robust energy management. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fncom.2025.1691017">Gan et al.</ext-link> calls for the integration of the advantages of deep learning and neuromorphic computing for future health monitoring and brain-computer interfaces. Finally, <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fncom.2025.1737839">Zhang et al.</ext-link> recommends bridging neuromorphic computing and deep learning for next-generation neural data interpretation, such as pattern recognition. Each contribution provides significant advancements in its respective field, demonstrating the potential of neuromorphic computing and deep learning techniques in practical applications.</p></sec>
</body>
<back>
<sec sec-type="author-contributions" id="s5">
<title>Author contributions</title>
<p>CZ: Writing &#x02013; original draft, Funding acquisition. RY: Funding acquisition, Writing &#x02013; review &#x00026; editing. JW: Writing &#x02013; review &#x00026; editing.</p>
</sec>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="ai-statement" id="s7">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was not used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p></sec>
<sec sec-type="disclaimer" id="s8">
<title>Publisher&#x00027;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<fn-group>
<fn fn-type="custom" custom-type="edited-by" id="fn0001">
<p>Edited and reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/49205/overview">Si Wu</ext-link>, School of Psychology and Cognitive Sciences, Peking University, China</p>
</fn>
</fn-group>
</back>
</article>