<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article article-type="research-article" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" dtd-version="1.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Digit. Health</journal-id><journal-title-group>
<journal-title>Frontiers in Digital Health</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Digit. Health</abbrev-journal-title></journal-title-group>
<issn pub-type="epub">2673-253X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fdgth.2025.1626279</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Original Research</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Exploring the feasibility of real-time on-device ECG biometric classification using quantized neural networks</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes"><name><surname>Berki</surname><given-names>Martin</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="cor1">&#x002A;</xref><uri xlink:href="https://loop.frontiersin.org/people/2776503/overview"/><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role></contrib>
<contrib contrib-type="author"><name><surname>Mateasik</surname><given-names>Anton</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role></contrib>
<contrib contrib-type="author"><name><surname>Micjan</surname><given-names>Michal</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref><uri xlink:href="https://loop.frontiersin.org/people/3274077/overview" /><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role></contrib>
<contrib contrib-type="author"><name><surname>Vavrinsky</surname><given-names>Erik</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref><uri xlink:href="https://loop.frontiersin.org/people/3102329/overview" /><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role></contrib>
<contrib contrib-type="author"><name><surname>Gasparek</surname><given-names>Krisztian</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role></contrib>
<contrib contrib-type="author"><name><surname>Cernaj</surname><given-names>Lubos</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role><role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role></contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>Institute of Electronics and Photonics, Slovak University of Technology</institution>, <city>Bratislava</city>, <country country="SK">Slovakia</country></aff>
<aff id="aff2"><label>2</label><institution>International Laser Centre</institution>, <city>Bratislava</city>, <country country="SK">Slovakia</country></aff>
<author-notes>
<corresp id="cor1"><label>&#x002A;</label><bold>Correspondence:</bold> Martin Berki <email xlink:href="mailto:martin.berki@stuba.sk">martin.berki@stuba.sk</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-03"><day>03</day><month>02</month><year>2026</year></pub-date>
<pub-date publication-format="electronic" date-type="collection"><year>2025</year></pub-date>
<volume>7</volume><elocation-id>1626279</elocation-id>
<history>
<date date-type="received"><day>10</day><month>05</month><year>2025</year></date>
<date date-type="rev-recd"><day>29</day><month>10</month><year>2025</year></date>
<date date-type="accepted"><day>19</day><month>12</month><year>2025</year></date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2026 Berki, Mateasik, Micjan, Vavrinsky, Gasparek and Cernaj.</copyright-statement>
<copyright-year>2026</copyright-year><copyright-holder>Berki, Mateasik, Micjan, Vavrinsky, Gasparek and Cernaj</copyright-holder><license><ali:license_ref start_date="2026-02-03">https://creativecommons.org/licenses/by/4.0/</ali:license_ref><license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p></license>
</permissions>
<abstract>
<p>Biometric classification using electrocardiogram (ECG) signals offers a promising pathway for continuous, personalized healthcare monitoring. This work presents a proof-of-concept embedded deep learning system for real-time ECG biometric classification on wearable Holter devices, reducing reliance on continuous cloud connectivity. A quantized convolutional neural network (CNN) was deployed on an STM32H7 microcontroller to identify individuals based on unique ECG patterns, incorporating an initial signal quality assessment stage to ensure that only high-quality segments are processed. Evaluated on the PTB Diagnostic ECG Database with subject-specific training, the system achieved an F1 score of 94.51&#x0025; and a classification accuracy of 94.68&#x0025; on five-second ECG segments, with an average inference time of 1.35&#x2009;s, enabling real-time operation on resource-constrained hardware. By performing on-device inference, the system improves data privacy, can reduce power consumption, and minimizes unnecessary data transmission. This embedded implementation demonstrates the feasibility of integrating lightweight ECG biometrics into wearable systems, with potential for future extensions toward personalized healthcare monitoring and early anomaly detection.</p>
</abstract>
<kwd-group>
<kwd>electrocardiogram</kwd>
<kwd>embedded systems</kwd>
<kwd>neural networks</kwd>
<kwd>biometric identification</kwd>
<kwd>quantized inference</kwd>
</kwd-group><funding-group><funding-statement>The author(s) declared that financial support was received for this work and/or its publication. This work was funded by the EU NextGenerationEU through the Recovery and Resilience Plan for Slovakia under the project No. 09I05-03-V02-00061.</funding-statement></funding-group><counts>
<fig-count count="6"/>
<table-count count="6"/><equation-count count="56"/><ref-count count="57"/><page-count count="14"/><word-count count="0"/></counts><custom-meta-group><custom-meta><meta-name>section-at-acceptance</meta-name><meta-value>Connected Health</meta-value></custom-meta></custom-meta-group>
</article-meta>
</front>
<body><sec id="s1" sec-type="intro"><label>1</label><title>Introduction</title>
<p>Cardiovascular diseases (CVDs) continue to represent the leading cause of mortality worldwide, accounting for approximately 20.5 million deaths in 2021, which constitutes nearly one-third of all global fatalities [<xref ref-type="bibr" rid="B1">1</xref>]. In the European Union alone, CVDs were responsible for 32.4&#x0025; of deaths during the same period [<xref ref-type="bibr" rid="B2">2</xref>]. Early detection, effective self-care, and continuous monitoring are critical strategies for mitigating complications and improving long-term patient outcomes [<xref ref-type="bibr" rid="B3">3</xref>, <xref ref-type="bibr" rid="B4">4</xref>].</p>
<p>Electrocardiogram (ECG) continues to play a central role in first-line cardiac diagnostics, providing a non-invasive method to evaluate the heart&#x2019;s electrical activity [<xref ref-type="bibr" rid="B5">5</xref>]. Standard 12-lead ECG recordings, typically performed during scheduled clinical check-ups [<xref ref-type="bibr" rid="B6">6</xref>], offer a comprehensive but transient snapshot of cardiac function. However, for patients with chronic heart conditions or those at elevated risk of ECG-detectable abnormalities, isolated measurements are insufficient to capture intermittent or evolving pathologies [<xref ref-type="bibr" rid="B7">7</xref>].</p>
<p>To address this limitation, portable ECG devices such as Holter monitors have been widely adopted to facilitate continuous cardiac monitoring over periods ranging from 24&#x2009;h to several weeks [<xref ref-type="bibr" rid="B8">8</xref>]. Holter recordings provide valuable longitudinal data, capturing transient arrhythmias and anomalies that would otherwise go undetected during brief clinical assessments [<xref ref-type="bibr" rid="B9">9</xref>]. While long-duration ECG monitoring offers new opportunities, it also introduces specific challenges. Extended recordings generate massive volumes of data, which, while rich in diagnostic potential, are computationally demanding to store, process, and analyze [<xref ref-type="bibr" rid="B10">10</xref>, <xref ref-type="bibr" rid="B11">11</xref>].</p>
<p>Historically, Holter monitors have operated as standalone systems, recording data locally for offline analysis [<xref ref-type="bibr" rid="B12">12</xref>]. Recent advances, however, have driven the evolution toward continuous wireless transmission of ECG data using wearable patches, smartwatches, and next-generation Holter devices [<xref ref-type="bibr" rid="B13">13</xref>, <xref ref-type="bibr" rid="B14">14</xref>]. These cloud-connected systems enable near real-time analysis, often powered by artificial intelligence (AI) algorithms, providing scalable solutions for automated diagnostics and long-term health monitoring [<xref ref-type="bibr" rid="B15">15</xref>, <xref ref-type="bibr" rid="B16">16</xref>].</p>
<p>Despite these benefits, cloud-based transmission introduces substantial trade-offs. Continuous data streaming requires significant bandwidth, increases power consumption, and raises concerns regarding data privacy and security. Moreover, reliance on network connectivity can lead to data transmission delays and intermittent loss of service, causing local storage bottlenecks during disruptions [<xref ref-type="bibr" rid="B16">16</xref>&#x2013;<xref ref-type="bibr" rid="B18">18</xref>]. These constraints highlight the need for more efficient data handling strategies in wearable cardiac monitoring systems.</p>
<p>Optimizing the efficiency of continuous ECG monitoring requires the ability to preprocess and classify signals directly on the device. By identifying and discarding low-quality or clinically irrelevant segments at the source, such a system can help reduce transmission loads, potentially extend battery life, and enhance user privacy. Embedded artificial intelligence offers a promising approach to achieving this goal, enabling real-time, on-device ECG analysis that reduces reliance on external computational infrastructure.</p>
<sec id="s1a"><label>1.1</label><title>The role of AI in personalized cardiac monitoring</title>
<p>Artificial intelligence has had a transformative impact on healthcare, enabling advancements in automated diagnostics, pathology classification, and clinical decision support systems [<xref ref-type="bibr" rid="B19">19</xref>&#x2013;<xref ref-type="bibr" rid="B21">21</xref>]. In the context of cardiology, AI-driven analysis of ECG signals has achieved significant success in the detection of arrhythmias, myocardial infarctions, and other cardiac abnormalities [<xref ref-type="bibr" rid="B22">22</xref>&#x2013;<xref ref-type="bibr" rid="B27">27</xref>]. However, the majority of current AI applications rely on cloud-based infrastructures, requiring stable network connectivity and substantial computational resources to operate effectively [<xref ref-type="bibr" rid="B28">28</xref>&#x2013;<xref ref-type="bibr" rid="B30">30</xref>].</p>
<p>Beyond conventional population-wide models, an emerging area of research focuses on personalized AI systems that adapt to the unique physiological characteristics of individual patients. Unlike generic AI models trained on large, heterogeneous datasets, personalized models learn a specific patient&#x2019;s baseline ECG patterns, enabling the detection of subtle deviations that may signify early pathological changes. This individualized approach is particularly valuable for long-term monitoring of patients with progressive cardiac conditions, where gradual shifts in ECG morphology can provide early indicators of deterioration [<xref ref-type="bibr" rid="B31">31</xref>&#x2013;<xref ref-type="bibr" rid="B34">34</xref>].</p>
<p>By establishing a dynamic, patient-specific understanding of cardiac function, personalized AI offers the potential to move beyond static diagnostics and toward continuous, adaptive health monitoring. Realizing this potential, however, requires solutions capable of operating directly on wearable devices, minimizing reliance on external infrastructure while maintaining real-time responsiveness.</p>
</sec>
<sec id="s1b"><label>1.2</label><title>Embedded AI for biometric ECG classification</title>
<p>Biometric classification involves the identification or verification of individuals based on their unique physiological or behavioral traits, such as fingerprints, facial features, or, in this study, electrocardiogram (ECG) signals. Traditional biometric systems extract distinctive features from input data to establish or confirm identity with high confidence [<xref ref-type="bibr" rid="B35">35</xref>, <xref ref-type="bibr" rid="B36">36</xref>].</p>
<p>In the context of cardiac monitoring, the morphology of an individual&#x2019;s ECG&#x2014;comprising the specific shapes, intervals, and rhythms of each heartbeat&#x2014;encodes subtle, person-specific electrical patterns. These patterns are influenced by anatomical variations, electrophysiological properties, and electrode placement [<xref ref-type="bibr" rid="B37">37</xref>, <xref ref-type="bibr" rid="B38">38</xref>]. By taking advantage of these inherent differences, ECG-based biometric systems can uniquely characterize individuals, offering an alternative to conventional biometric modalities.</p>
<p>This study explores the integration of biometric ECG classification directly into a wearable Holter device, enabling the system to autonomously verify patient identity in real-time without reliance on cloud servers. Rather than diagnosing cardiac abnormalities, the primary objective is to determine whether the captured ECG segment originates from the intended user. This personalized approach supports the establishment of an individual cardiac baseline, forming the foundation for future systems capable of continuous patient-specific health tracking and, eventually, the detection of deviations indicative of physiological deterioration.</p>
</sec>
</sec>
<sec id="s2"><label>2</label><title>Related work</title>
<p>Research in ECG biometrics has traditionally focused on security and identification applications, where ECG signals are used as unique physiological signatures to verify individual identity. In these approaches, distinctive features such as heartbeat intervals, waveform morphology, and rhythm patterns are extracted to establish identity with high confidence [<xref ref-type="bibr" rid="B39">39</xref>&#x2013;<xref ref-type="bibr" rid="B45">45</xref>]. Studies have demonstrated the robustness of ECG-based biometrics across varying sensor types and acquisition conditions, supporting its potential for practical deployment.</p>
<p>Beyond security applications, the integration of ECG biometrics into healthcare diagnostics has recently emerged as a promising research direction. Shusterman and London [<xref ref-type="bibr" rid="B46">46</xref>] proposed a personalized ECG monitoring (pECG) framework that combines adaptive machine learning and distributed architectures to refine patient-specific baselines over time. Their system extracts minimal features locally while relying on server-based adaptation to handle evolving physiological changes, illustrating the benefits of personalization in continuous cardiac monitoring.</p>
<p>Similarly, Mangold et al. [<xref ref-type="bibr" rid="B47">47</xref>] conducted a large-scale study using over 970,000 ECG records from approximately 100,000 patients to evaluate the feasibility of ECG signals as biometric identifiers. Employing a Siamese neural network architecture, they achieved high matching accuracy and further demonstrated that single-lead ECGs can approach the performance of full 12-lead configurations. Their analysis of longitudinal data also highlighted the dynamic nature of ECG fingerprints, suggesting the importance of adaptive modeling for long-term applications.</p>
<p>Efforts to implement real-time ECG analysis on embedded hardware have also been reported. Raj and Ray [<xref ref-type="bibr" rid="B48">48</xref>] developed a microcontroller-based monitoring system capable of local feature extraction and arrhythmia detection. Their approach relied on classical signal processing methods, including the Discrete Cosine Stockwell Transform and support vector machines, to achieve efficient on-device classification with low power consumption.</p>
<p>Recent lightweight 1D CNNs for single-lead ECG have also been explored for arrhythmia detection. Rahman and Faezipour [<xref ref-type="bibr" rid="B49">49</xref>] proposed a compact time-domain CNN and argued that its computational footprint makes it suitable for embedded deployment. While their focus is population-level arrhythmia classification rather than biometric verification, the architectural theme&#x2014;shallow temporal convolutions over short ECG windows&#x2014;aligns with our motivation for compact models on constrained devices. In contrast, our study emphasizes a fully embedded, subject-specific pipeline with static int8 quantization, explicit memory/timing analysis on a Cortex-M7, and a two-stage flow with signal-quality gating.</p>
<p>Wang et al. [<xref ref-type="bibr" rid="B50">50</xref>] proposed an ECG biometric authentication framework based on self-supervised representation learning for IoT edge devices. Their approach employed a convolutional encoder trained via contrastive pre-training on largely unlabeled ECG data, followed by correlation-based identity verification using per-subject templates. Evaluated on the PTB Diagnostic ECG Database and cross-validated on external datasets such as MIT-BIH Arrhythmia and ECG-ID, their method achieved high authentication accuracy (<inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM1"><mml:mo>&#x2248;</mml:mo></mml:math></inline-formula>&#x2009;99.1&#x0025; on PTB-DB and <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM2"><mml:mo>&#x2264;</mml:mo></mml:math></inline-formula>&#x2009;98.5&#x0025; on unseen datasets), demonstrating strong generalization capabilities. To support embedded operation, they further implemented model pruning and 8-bit quantization on a Cortex-M4F microcontroller, reporting only marginal accuracy loss (0.5&#x0025;) while reducing computational cost by approximately 37&#x0025;. This work highlighted the potential of self-supervised feature extraction for low-power biometric systems and emphasized the scalability of global ECG encoders across multiple users. In contrast, the present work pursues a complementary objective: instead of learning a population-wide ECG representation for universal authentication, it investigates a personalized verification paradigm in which the model is trained to recognize a single individual&#x2019;s cardiac signature directly on the device. The proposed framework integrates a compact, statically quantized CNN with an initial signal-quality assessment stage, enabling selective and fully autonomous operation on the embedded hardware.</p>
<p>Collectively, these studies demonstrate significant progress toward personalized, automated ECG analysis. They highlight the potential of ECG biometrics for continuous health monitoring, the importance of adapting to individual physiological baselines, and the feasibility of deploying classification systems on embedded platforms. However, challenges remain in integrating deep learning-based personalization into fully embedded systems with significantly reduced reliance on cloud infrastructure, particularly under the resource constraints typical of wearable Holter devices.</p>
</sec>
<sec id="s3"><label>3</label><title>Methodology</title>
<p>Real-time ECG biometric classification on embedded hardware was achieved through a structured workflow, addressing the specific challenges of operating within the computational and memory constraints of a resource-limited microcontroller.</p>
<sec id="s3a"><label>3.1</label><title>Data acquisition and preprocessing</title>
<p>ECG data were sourced from the PTB Diagnostic ECG Database [<xref ref-type="bibr" rid="B51">51</xref>]. We used five subjects in total: Subject 180 (7 recordings), Subjects 093 and 233 (5 recordings each), and Subjects 007 and 011 (4 recordings each). Primary evaluation was performed on Subject 180 due to the larger number of available recordings, while multi-subject validation and benchmarking were conducted on Subjects 093, 233, 007, and 011.</p>
<p>To simulate Holter monitoring, only the aVL lead was used. Signals sampled at 1,000 Hz were downsampled to 500 Hz to reduce computational load while preserving morphological features. Data were segmented into non-overlapping 5-s windows and standardized to zero mean and unit variance prior to inference. The standardized 5-s segments were then passed to the signal-quality stage followed by the biometric classifier as described below.</p>
</sec>
<sec id="s3b"><label>3.2</label><title>Signal quality assessment</title>
<p>Prior to biometric classification, the system performs an initial signal quality assessment to ensure that only clinically relevant ECG segments are processed. This step is essential, as long-duration Holter recordings often include artifacts caused by motion, poor electrode contact, or environmental noise. To enable automated filtering, a custom dataset of 187,000 manually labeled ECG segments was created, distinguishing usable signals from degraded ones based on visual inspection. A convolutional-recurrent neural network (CNN-LSTM) trained on this dataset achieved an F1 score of 90.06&#x0025; in separating high-quality segments from noise [<xref ref-type="bibr" rid="B52">52</xref>].</p>
<p>Labeling was conducted by two independent groups of trained annotators following a joint briefing and calibration session with shared guidelines. Group A and Group B each annotated one half of the dataset in the first pass, after which the halves were swapped for a second-pass review of the other group&#x2019;s labels. Samples for which the second pass did not yield consensus were removed from the final dataset.</p>
<p>For embedded deployment, the CNN-LSTM was replaced with a more efficient pure-CNN model to reduce inference time and memory usage on the STM32H7 microcontroller. To further optimize memory usage, the signal quality assessment network was aligned with the biometric classification model, sharing the same architecture and differing only in trained weights. This design enables efficient weight swapping between inference stages, minimizing memory overhead and simplifying deployment while maintaining strong classification performance.</p>
<p>On the held-out signal-quality test set, the pure-CNN achieved an F1 score of approximately 88.2&#x0025;, representing a drop of about 2 percentage points compared with the CNN-LSTM baseline. This reduction was accepted in exchange for substantially lower latency and memory footprint on the STM32H7.</p>
</sec>
<sec id="s3c"><label>3.3</label><title>Neural network architecture</title>
<p>The biometric classifier is a lightweight CNN with four convolutional layers and a fully connected head. The first convolutional layer&#x2019;s receptive field spans 250 ms, to fully capture the QRS complex and adjacent waveforms. The architecture was experimentally optimized to balance accuracy and resource efficiency.</p>
</sec>
<sec id="s3d"><label>3.4</label><title>Training procedure</title>
<p>Standard training procedures were used (Adam optimizer, binary cross-entropy loss) for 50 epochs, with training and validation sets drawn from different ECG recordings.</p>
</sec>
<sec id="s3e"><label>3.5</label><title>Model quantization and embedded deployment</title>
<p>The trained model was statically quantized to 8-bit precision using ONNX [<xref ref-type="bibr" rid="B53">53</xref>], with calibration on a subset of the training data. A two-stage inference system was deployed on the STM32H7 microcontroller: the first pass assesses signal quality, and if acceptable, new model weights are reloaded for biometric classification. Although direct power measurements were not conducted, quantized inference and conditional execution are expected to reduce energy consumption.</p>
</sec>
</sec>
<sec id="s4"><label>4</label><title>Data</title>
<p>This study utilized the PTB Diagnostic ECG Database [<xref ref-type="bibr" rid="B51">51</xref>]. A summary of key database attributes is provided in <xref ref-type="table" rid="T1">Table&#x00A0;1</xref>. The database contains high-resolution, multi-channel ECG recordings from 290 subjects, sampled at 1,000 Hz with 16-bit resolution, and includes conventional 12-lead recordings alongside Frank leads. Basic demographic and clinical annotations are also provided.</p>
<table-wrap id="T1" position="float"><label>Table&#x00A0;1</label>
<caption><p>Attributes of the PTB diagnostic ECG database v1.0.0.</p></caption>
<table>
<colgroup>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th valign="top" align="left">Attribute</th>
<th valign="top" align="center">Description</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Title</td>
<td valign="top" align="left">PTB Diagnostic ECG Database v1.0.0</td>
</tr>
<tr>
<td valign="top" align="left">URL source</td>
<td valign="top" align="left"><ext-link ext-link-type="uri" xlink:href="https://www.physionet.org/content/ptbdb/1.0.0/">https://www.physionet.org/content/ptbdb/1.0.0/</ext-link></td>
</tr>
<tr>
<td valign="top" align="left">Number of records</td>
<td valign="top" align="left">549 high-resolution 15-lead ECGs</td>
</tr>
<tr>
<td valign="top" align="left">Number of subjects</td>
<td valign="top" align="left">290 subjects, each with 1 to 5 ECG records</td>
</tr>
<tr>
<td valign="top" align="left">Demographics</td>
<td valign="top" align="left">Ages 17 to 87 (mean age: 57.2 years); 209 males (mean age: 55.5), 81 females (mean age: 61.6)</td>
</tr>
<tr>
<td valign="top" align="left">ECG leads</td>
<td valign="top" align="left">15 simultaneously measured signals: 12 standard leads (I, II, III, aVR, aVL, aVF, V1, V2, V3, V4, V5, V6) and 3 Frank leads (vx, vy, vz)</td>
</tr>
<tr>
<td valign="top" align="left">Sampling rate</td>
<td valign="top" align="left">1,000 Hz (with recordings available up to 10 kHz upon request)</td>
</tr>
<tr>
<td valign="top" align="left">Resolution</td>
<td valign="top" align="left">16-bit digitization with a range of <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM3"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula>&#x2009;16.384 mV</td>
</tr>
<tr>
<td valign="top" align="left">Input voltage</td>
<td valign="top" align="left"><inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM4"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula>&#x2009;16 mV, compensated offset voltage up to <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM5"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula>&#x2009;300 mV</td>
</tr>
<tr>
<td valign="top" align="left">Bandwidth</td>
<td valign="top" align="left">0&#x2013;1 kHz (synchronous sampling of all channels)</td>
</tr>
<tr>
<td valign="top" align="left">Noise voltage</td>
<td valign="top" align="left">Maximum 10 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM6"><mml:mrow><mml:mtext fontfamily="times">&#x3BC;</mml:mtext></mml:mrow></mml:math></inline-formula>V (peak-to-peak), 3 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM7"><mml:mrow><mml:mtext fontfamily="times">&#x3BC;</mml:mtext></mml:mrow></mml:math></inline-formula>V (RMS) with input short circuit</td>
</tr>
<tr>
<td valign="top" align="left">Clinical annotations</td>
<td valign="top" align="left">Includes age, gender, diagnosis, medical history, medication, interventions, coronary artery pathology, ventriculography, echocardiography, and hemodynamics (not available for 22 subjects)</td>
</tr>
<tr>
<td valign="top" align="left">Diagnostic classes</td>
<td valign="top" align="left">Myocardial Infarction: 148</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Cardiomyopathy/Heart Failure: 18</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Bundle Branch Block: 15</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Dysrhythmia: 14</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Myocardial Hypertrophy: 7</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Valvular Heart Disease: 6</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Myocarditis: 4</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Miscellaneous: 4</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">Healthy Controls: 52</td>
</tr>
<tr>
<td valign="top" align="left">Data format</td>
<td valign="top" align="left">Each record includes a header (.hea) file with clinical summaries and a data (.dat) file containing the ECG signals</td>
</tr>
<tr>
<td valign="top" align="left">Access and license</td>
<td valign="top" align="left">Open Access under the Open Data Commons Attribution License v1.0</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>For model development and evaluation, we trained and evaluated subject-specific models for five subjects (IDs 180, 093, 233, 007, and 011). Subject 180, a 37-year-old healthy male with seven recordings (each approximately 10&#x2009;min), served as a primary example due to the larger number of recordings, but all five subjects were included for training, validation, and benchmarking as outlined in the Methodology. This multi-subject setup ensured that training, validation, and testing were conducted across distinct recordings on a per-subject basis.</p>
<p>Only the aVL lead was used to simulate a Holter setup, reflecting the single-lead configuration of our target wearable device. Although data acquisition from our custom Holter system is planned for future work, the PTB data was used to establish initial feasibility. Signals were downsampled from 1,000 Hz to 500 Hz to reduce computational overhead while preserving critical features such as QRS complexes and ST-segment morphology. Each recording was segmented into non-overlapping 5-s windows and standardized to zero mean and unit variance prior to training.</p>
</sec>
<sec id="s5"><label>5</label><title>Target hardware</title>
<p>The target platform for this study is a custom-developed wearable Holter device designed for single-channel ECG acquisition [<xref ref-type="bibr" rid="B54">54</xref>]. The device is depicted in <xref ref-type="fig" rid="F1">Figure 1</xref>. It uses a three-electrode configuration (right arm, left arm, and ground) suitable for continuous ambulatory monitoring. The device integrates a high-performance STM32H743VI6 microcontroller, enabling real-time on-device neural network inference.</p>
<fig id="F1" position="float"><label>Figure&#x00A0;1</label>
<caption><p>Three-dimensional render of the self-developed single-lead ECG Holter device designed for continuous wearable monitoring. The system consists of three interconnected modules: (1) the main digital/analog module housing the STM32H7 microcontroller, analog front-end, and wireless communication interface; (2) the ground reference electrode providing common-mode noise rejection and patient isolation; and (3) the right-arm (RA) and left-arm (LA) sensing electrodes responsible for ECG signal acquisition. All components are connected through low-profile shielded cables to ensure minimal motion artifacts and user comfort. The modular design enables both high-quality signal acquisition and efficient on-device neural network inference for real-time biometric classification.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-07-1626279-g001.tif"><alt-text content-type="machine-generated">Three connected electronic modules labeled one, two, and three are shown. Module one is rectangular with a yellow base. Module two is similar but smaller. Module three features two attached earbuds. All are connected by a black wire.</alt-text>
</graphic>
</fig>
<p>Key hardware specifications relevant to embedded AI deployment are summarized in <xref ref-type="table" rid="T2">Table&#x00A0;2</xref>. The Cortex-M7 processor, combined with 1 MB RAM and efficient DMA support, provides the necessary computational resources for running quantized convolutional models within tight power and memory constraints.</p>
<table-wrap id="T2" position="float"><label>Table&#x00A0;2</label>
<caption><p>Key specifications of the STM32H743VI6 microcontroller.</p></caption>
<table>
<colgroup>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th valign="top" align="left">Parameter</th>
<th valign="top" align="center">Specification</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Core</td>
<td valign="top" align="left">32-bit Arm Cortex-M7, 480 MHz, double-precision FPU, DSP instructions</td>
</tr>
<tr>
<td valign="top" align="left">RAM</td>
<td valign="top" align="left">1 MB (192 KB TCM <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM8"><mml:mo>+</mml:mo></mml:math></inline-formula> 864 KB SRAM)</td>
</tr>
<tr>
<td valign="top" align="left">Flash memory</td>
<td valign="top" align="left">2 MB (read-while-write support)</td>
</tr>
<tr>
<td valign="top" align="left">DMA controllers</td>
<td valign="top" align="left">4 DMA engines, including 1 Master DMA (MDMA)</td>
</tr>
<tr>
<td valign="top" align="left">Clock sources</td>
<td valign="top" align="left">HSI 64 MHz, HSE 4&#x2013;48 MHz, LSE 32.768 kHz, 3 PLLs</td>
</tr>
<tr>
<td valign="top" align="left">External memory interface</td>
<td valign="top" align="left">Dual-mode Quad-SPI, SDRAM, NOR/NAND flash support (up to 100 MHz)</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>While the device supports modular upgrades such as multi-lead ECG acquisition, auxiliary sensing (IMU, barometer, magnetometer), and 2G/4G cellular communication, these features were not utilized in the present study. The current setup focuses solely on future single-lead ECG acquisition and processing for proof-of-concept validation.</p>
</sec>
<sec id="s6"><label>6</label><title>Convolutional neural network architecture</title>
<p>The proposed convolutional neural network (CNN) processes 5-s, single-channel ECG segments sampled at 500 Hz, corresponding to 2,500 time steps per input. The architecture consists of four convolutional layers followed by a fully connected classifier, progressively transforming raw ECG waveforms into an abstract representation suitable for classification. Although convolutional networks are often treated as &#x201C;black boxes,&#x201D; their hierarchical structure allows them to learn increasingly complex patterns [<xref ref-type="bibr" rid="B55">55</xref>], as demonstrated by <xref ref-type="fig" rid="F3">Figure&#x00A0;3</xref>, which visualizes the output of selected filters at different layers.</p>
<fig id="F2" position="float"><label>Figure&#x00A0;2</label>
<caption><p>Training and validation loss over 50 epochs for the PTB dataset. The model&#x2019;s validation loss steadily declined during early epochs, reaching a minimum of <bold>0.0289</bold> at epoch 35.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-07-1626279-g002.tif"><alt-text content-type="machine-generated">Graph showing training and validation loss over 50 epochs. The solid red line represents training loss, and the dashed blue line represents validation loss. The lowest validation loss, 0.0289, occurs at epoch 35.</alt-text>
</graphic>
</fig>
<p>The first convolutional layer applies 32 filters with a kernel size of 125 and a stride of 8, effectively downsampling the signal while preserving its key morphological characteristics. This layer&#x2019;s receptive field spans 250 ms, allowing it to process an entire QRS complex along with adjacent waveform components. Batch normalization is used to stabilize training, followed by a ReLU activation. A max-pooling operation with a kernel size of 4 and stride 4 further reduces the temporal resolution, outputting a feature map of shape (32, 78), where each feature encodes localized signal variations. As illustrated in <xref ref-type="fig" rid="F3">Figure&#x00A0;3b</xref>, this stage predominantly highlights sharp transitions, which are characteristic of QRS complexes and other high-frequency elements.</p>
<fig id="F3" position="float"><label>Figure&#x00A0;3</label>
<caption><p>Visualization of convolutional layer outputs. The original ECG signal (top) is gradually transformed as it moves through the network. Early layers keep high-frequency components, while deeper layers capture broader waveform patterns. (<bold>a</bold>) Original signal. (<bold>b</bold>) Convolved signals at Layer 1. (<bold>c</bold>) Convolved signals at Layer 2. (<bold>d</bold>) Convolved signals at Layer 3. (<bold>e</bold>) Convolved signals at Layer 4.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-07-1626279-g003.tif"><alt-text content-type="machine-generated">Five line graphs labeled a) to e) show amplitude versus samples. Each graph compares three filters. Graph a) shows more pronounced peaks compared to the others. Graphs b) to e) display progressively smoother lines, indicating varying filter effects across 300, 80, 40, and 20 samples respectively. Legends specify filter numbers for each graph.</alt-text>
</graphic>
</fig>
<p>The second convolutional layer refines these initial representations using 32 filters with a smaller kernel size of 3, a stride of 1, and padding of 1, ensuring that local signal morphology is preserved while allowing the network to detect subtle waveform variations. Another max-pooling operation (kernel size 2, stride 2) reduces the feature map to (32, 39). At this stage, as seen in <xref ref-type="fig" rid="F3">Figure&#x00A0;3c</xref>, the extracted features still retain noticeable ECG morphology but become more invariant to small perturbations.</p>
<p>The third convolutional layer expands the network&#x2019;s capacity by increasing the number of filters to 64 while maintaining a kernel size of 3 and a stride of 1. A subsequent max-pooling operation (kernel size 2, stride 2) further reduces the temporal dimension to (64, 19), distilling the most salient inter-beat patterns. <xref ref-type="fig" rid="F3">Figure&#x00A0;3d</xref> demonstrates that this layer begins to focus more on rhythm-based variations rather than fine waveform details, aiding in subject-specific identification.</p>
<p>The final convolutional layer (<xref ref-type="fig" rid="F3">Figure&#x00A0;3e</xref>) preserves the structural integrity of the extracted features while condensing them into a compact representation. Using 64 filters with a kernel size of 3 and an adaptive average pooling operation, it outputs a feature map of size (64,1), which serves as an abstract signature of the input ECG segment. Unlike earlier layers, which retain interpretable waveform-like structures, this stage generates highly distilled feature vectors that no longer resemble the raw input but encapsulate key distinguishing characteristics.</p>
<p>Following convolutional feature extraction, the output is flattened and passed through a fully connected classifier. The first dense layer maps the 64-element feature vector to 32 neurons with ReLU activation, followed by a final layer that applies a sigmoid activation function to produce a probability score in the range <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM9"><mml:mo stretchy="false">[</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">]</mml:mo></mml:math></inline-formula>, representing the likelihood that the input belongs to a specific individual. The fully connected layers refine the high-level representations, reinforcing subject-specific characteristics while discarding redundant information. Visualization of the network&#x0027;s layers is depicted in <xref ref-type="fig" rid="F4">Figure 4</xref>.</p>
<fig id="F4" position="float"><label>Figure&#x00A0;4</label>
<caption><p>The architecture of the proposed CNN, consisting of four convolutional layers followed by a fully connected classifier. The model gradually transforms ECG signals into feature representations suitable for classification.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-07-1626279-g004.tif"><alt-text content-type="machine-generated">Diagram of a deep learning model for an ECG signal, illustrating layers and operations. It starts with Conv1D for a five-second ECG segment, followed by layers of BatchNormalization, ReLU, and MaxPooling1D. The model continues with more Conv1D, BatchNorm1D, ReLU, AdaptiveAvgPool1D, and concludes with dense layers for output prediction. Color-coded blocks represent different operations.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s7"><label>7</label><title>Training results for biometric classification</title>
<p>We trained and evaluated subject-specific models for five enrolled subjects (180, 093, 233, 007, 011). For each subject, training, validation, and testing sets were drawn from distinct recordings to avoid leakage. For Subject 180&#x2014;who had seven recordings&#x2014;we used four recordings for training (<inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM10"><mml:mo>&#x2248;</mml:mo></mml:math></inline-formula>&#x2009;40&#x2009;min), two for validation, and one for testing. For the remaining subjects, analogous per-subject splits were used; the segment-level Train/Test/Eval counts are summarized in <xref ref-type="table" rid="T3">Table&#x00A0;3</xref>. In all cases, evaluation used recordings unseen during training and validation.</p>
<table-wrap id="T3" position="float"><label>Table&#x00A0;3</label>
<caption><p>Verification performance per enrolled subject.</p></caption>
<table>
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="center"/>
<col align="center"/>
<col align="center"/>
</colgroup>
<thead>
<tr>
<th valign="top" align="left">Patient</th>
<th valign="top" align="left">Model</th>
<th valign="top" align="left">Domain</th>
<th valign="top" align="center">Train/test/eval</th>
<th valign="top" align="center">PC-F1</th>
<th valign="top" align="center">PR-AUC</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">180</td>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center">95/48/24</td>
<td valign="top" align="center">0.95</td>
<td valign="top" align="center">0.99</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.88</td>
<td valign="top" align="center">0.95</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.41</td>
<td valign="top" align="center">0.69</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.44</td>
<td valign="top" align="center">0.54</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">SVM</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.68</td>
<td valign="top" align="center">0.86</td>
</tr>
<tr>
<td valign="top" align="left">093</td>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center">69/23/23</td>
<td valign="top" align="center">0.93</td>
<td valign="top" align="center">0.97</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.42</td>
<td valign="top" align="center">0.63</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.69</td>
<td valign="top" align="center">0.82</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.75</td>
<td valign="top" align="center">0.89</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">SVM</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.16</td>
<td valign="top" align="center">0.33</td>
</tr>
<tr>
<td valign="top" align="left">233</td>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center">72/24/24</td>
<td valign="top" align="center">0.94</td>
<td valign="top" align="center">0.97</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.81</td>
<td valign="top" align="center">0.89</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.91</td>
<td valign="top" align="center">0.96</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.80</td>
<td valign="top" align="center">0.92</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">SVM</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.77</td>
<td valign="top" align="center">0.88</td>
</tr>
<tr>
<td valign="top" align="left">007</td>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center">46/23/23</td>
<td valign="top" align="center">0.92</td>
<td valign="top" align="center">0.96</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.65</td>
<td valign="top" align="center">0.73</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.52</td>
<td valign="top" align="center">0.77</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.22</td>
<td valign="top" align="center">0.35</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">SVM</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.35</td>
<td valign="top" align="center">0.55</td>
</tr>
<tr>
<td valign="top" align="left">011</td>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center">29/23/23</td>
<td valign="top" align="center">0.83</td>
<td valign="top" align="center">0.90</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.72</td>
<td valign="top" align="center">0.88</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.04</td>
<td valign="top" align="center">0.11</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.07</td>
<td valign="top" align="center">0.09</td>
</tr>
<tr>
<td valign="top" align="left"/>
<td valign="top" align="left">SVM</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center"/>
<td valign="top" align="center">0.33</td>
<td valign="top" align="center">0.61</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="TF1"><p>Train/test/eval counts denote 5&#x2009;s segments.</p></fn>
<fn id="TF2"><p>PC-F1, positive-class F1; PR-AUC, area under the precision&#x2013;recall curve.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>Across subjects, the time-domain CNN consistently provided the strongest verification performance. Averaged over all five enrolled subjects, the time-domain CNN achieved <bold>0.91</bold> <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM11"><mml:mrow><mml:mo mathvariant="bold">&#x00B1;</mml:mo></mml:mrow></mml:math></inline-formula> <bold>0.05 PC-F1</bold> and <bold>0.96</bold> <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM12"><mml:mrow><mml:mo mathvariant="bold">&#x00B1;</mml:mo></mml:mrow></mml:math></inline-formula> <bold>0.03 PR-AUC</bold> (<xref ref-type="table" rid="T4">Table&#x00A0;4</xref>). The best result was observed for Subject 180 with <bold>PC-F1</bold> <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM13"><mml:mrow><mml:mo mathvariant="bold">=</mml:mo></mml:mrow></mml:math></inline-formula> <bold>0.95</bold> and <bold>PR-AUC</bold> <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM14"><mml:mrow><mml:mo mathvariant="bold">=</mml:mo></mml:mrow></mml:math></inline-formula> <bold>0.99</bold>, indicating highly separable per-subject embeddings and a robust precision&#x2013;recall trade-off. The comparatively lower performance on Subject 011 (<bold>PC-F1</bold> <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM15"><mml:mrow><mml:mo mathvariant="bold">=</mml:mo></mml:mrow></mml:math></inline-formula> <bold>0.83</bold>) likely reflects a combination of smaller training set size (29 training segments) and greater intra-recording variability. 
Notably, the narrow standard deviations for the time-domain CNN contrast with the larger variability of FFT-domain models (e.g., CNN FFT PC-F1 SD <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM16"><mml:mo>=</mml:mo></mml:math></inline-formula> 0.33), suggesting that spectral features are more sensitive to subject- and session-specific conditions.</p>
<table-wrap id="T4" position="float"><label>Table&#x00A0;4</label>
<caption><p>Mean <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM17"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> standard deviation of positive-class F1 (PC-F1) and area under the precision&#x2013;recall curve (PR-AUC) across all enrolled subjects.</p></caption>
<table>
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="center"/>
<col align="center"/>
</colgroup>
<thead>
<tr>
<th valign="top" align="left">Model</th>
<th valign="top" align="left">Domain</th>
<th valign="top" align="center">PC-F1 (mean&#x2009;<inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM18"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula>&#x2009;SD)</th>
<th valign="top" align="center">PR-AUC (mean&#x2009;<inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM19"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula>&#x2009;SD)</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center">0.51 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM20"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.33</td>
<td valign="top" align="center">0.67 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM21"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.33</td>
</tr>
<tr>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center">0.91 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM22"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.05</td>
<td valign="top" align="center">0.96 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM23"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.03</td>
</tr>
<tr>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center">0.46 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM24"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.32</td>
<td valign="top" align="center">0.56 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM25"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.35</td>
</tr>
<tr>
<td valign="top" align="left">MobileNet</td>
<td valign="top" align="left">Time</td>
<td valign="top" align="center">0.70 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM26"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.18</td>
<td valign="top" align="center">0.82 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM27"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.13</td>
</tr>
<tr>
<td valign="top" align="left">SVM</td>
<td valign="top" align="left">FFT</td>
<td valign="top" align="center">0.46 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM28"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.26</td>
<td valign="top" align="center">0.65 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM29"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.23</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="T5" position="float"><label>Table&#x00A0;5</label>
<caption><p>Quantization and embedded model analysis.</p></caption>
<table>
<colgroup>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th valign="top" align="left">Parameter</th>
<th valign="top" align="center">Specification</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Model format</td>
<td valign="top" align="left">ONNX</td>
</tr>
<tr>
<td valign="top" align="left">Pre-quantization size</td>
<td valign="top" align="left">123 KB (<monospace>float32</monospace> model)</td>
</tr>
<tr>
<td valign="top" align="left">Post-quantization size</td>
<td valign="top" align="left">48 KB (<monospace>int8</monospace> static quantization)</td>
</tr>
<tr>
<td valign="top" align="left">Compression ratio</td>
<td valign="top" align="left">60.9&#x0025; reduction in model size</td>
</tr>
<tr>
<td valign="top" align="left">Quantization type</td>
<td valign="top" align="left">Static Quantization (ONNX <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM30"><mml:mo stretchy="false">&#x2192;</mml:mo></mml:math></inline-formula> ONNX&#x005F;Q8)</td>
</tr>
<tr>
<td valign="top" align="left">MACC operations</td>
<td valign="top" align="left">1,983,138 Multiply-Accumulate Operations</td>
</tr>
<tr>
<td valign="top" align="left">Weights memory (RO)</td>
<td valign="top" align="left">28,484 B (27.82 KB)</td>
</tr>
<tr>
<td valign="top" align="left">Activation memory (RW)</td>
<td valign="top" align="left">8,980 B (8.77 KB)</td>
</tr>
<tr>
<td valign="top" align="left">Total RAM usage</td>
<td valign="top" align="left">8,980 B (includes activation buffer)</td>
</tr>
<tr>
<td valign="top" align="left">Input tensor</td>
<td valign="top" align="left"><monospace>int8(1x2500)</monospace>, 2.44 KB, QLinear(0.2578, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM31"><mml:mo>&#x2212;</mml:mo></mml:math></inline-formula>50, int8)</td>
</tr>
<tr>
<td valign="top" align="left">Output tensor</td>
<td valign="top" align="left"><monospace>int8(1x1)</monospace>, 1 Byte, QLinear(0.0039, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM32"><mml:mo>&#x2212;</mml:mo></mml:math></inline-formula>128, int8)</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>In four out of five subjects, time-domain models clearly outperformed their FFT-based counterparts, consistent with the aggregate statistics in <xref ref-type="table" rid="T4">Table&#x00A0;4</xref>. An interesting exception is Subject 233, where the FFT-domain CNN reached <bold>PC-F1</bold> <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM33"><mml:mrow><mml:mo mathvariant="bold">=</mml:mo></mml:mrow></mml:math></inline-formula> <bold>0.91</bold> and <bold>PR-AUC</bold> <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM34"><mml:mrow><mml:mo mathvariant="bold">=</mml:mo></mml:mrow></mml:math></inline-formula> <bold>0.96</bold>, closely trailing the time-domain CNN (0.94/0.97). This suggests that, for certain individuals, frequency-encoded morphology may capture subject-specific traits that are nearly as discriminative as time-local features. Nevertheless, FFT-domain performance was generally weaker (e.g., Subjects 007 and 011), indicating that phase and temporal context remain important for stable verification across diverse recording conditions.</p>
<p>The lightweight time-domain CNN consistently surpassed MobileNet variants on this task. Notably, Subject 093 showed a pronounced gap for MobileNet in the time domain (<bold>PC-F1</bold> <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM35"><mml:mrow><mml:mo mathvariant="bold">=</mml:mo></mml:mrow></mml:math></inline-formula> <bold>0.42</bold>), while the same architecture performed substantially better in the FFT domain (0.75). On average (<xref ref-type="table" rid="T4">Table&#x00A0;4</xref>), MobileNet Time reached <bold>0.70</bold> <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM36"><mml:mrow><mml:mo mathvariant="bold">&#x00B1;</mml:mo></mml:mrow></mml:math></inline-formula> <bold>0.18 PC-F1</bold>, reflecting higher across-subject variability than the CNN Time, whereas MobileNet FFT averaged 0.46 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM37"><mml:mo>&#x00B1;</mml:mo></mml:math></inline-formula> 0.32 PC-F1. We hypothesize that depthwise separable convolutions may be more sensitive to per-subject normalization and limited data in the time domain, whereas their inductive bias transfers more gracefully to spectral representations. Classical SVMs in the FFT domain were occasionally competitive (e.g., Subject 180, <bold>PC-F1</bold> <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM38"><mml:mrow><mml:mo mathvariant="bold">=</mml:mo></mml:mrow></mml:math></inline-formula> <bold>0.68</bold> vs. MobileNet FFT at 0.44), underscoring that simpler decision boundaries can work reasonably well when features emphasize stable, subject-specific spectral bands.</p>
<p>Performance correlated with the amount and diversity of training material per subject. Subject 180 benefited from the largest pool of recordings, allowing us to reserve multiple unseen records for validation and testing while maintaining a sizable training set. In contrast, Subject 011 exhibited the lowest CNN time-domain PC-F1 (0.83), which we attribute to both fewer training segments and potentially higher within-subject variability. This observation aligns with a broader principle in personalized biometrics: subject-specific models improve with increased coverage of day-to-day variation, electrode placement drift, and noise conditions.</p>
<p>Reported PC-F1 values reflect thresholds selected on per-subject validation sets to optimize F1. The high PR-AUC values (often <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM39"><mml:mo>&#x2265;</mml:mo><mml:mn>0.96</mml:mn></mml:math></inline-formula>) indicate that scores are well-calibrated for ranking, offering flexibility to tune operating points post-deployment&#x2014;for example, targeting low false accept rates in clinical workflows while retaining acceptable true accept rates. In practice, we recommend setting subject-specific thresholds based on a short enrollment calibration to match task requirements (e.g., stricter verification vs. more permissive monitoring).</p>
<p>From an embedded deployment perspective, we find it encouraging that a compact time-domain CNN reliably outperforms heavier or more generic architectures while remaining deployable on a microcontroller. The consistent edge of time-domain features suggests that preserving temporal morphology&#x2014;even in short, 5-s windows&#x2014;provides strong identity cues. At the same time, the near-parity of FFT performance for Subject 233 is a useful reminder that some users may present more stable frequency-domain signatures, which could be exploited in hybrid or adaptive pipelines. For the few lower-performing cases, we expect that modest additions&#x2014;beat-synchronous alignment, rhythm-aware pooling, or subject-tailored data augmentation (e.g., controlled baseline wander, mild heart-rate shifts)&#x2014;would close much of the gap without sacrificing on-device efficiency.</p>
<p><xref ref-type="fig" rid="F4">Figure&#x00A0;4</xref> illustrates the evolution of training and validation loss across 50 epochs. The validation loss exhibited a smooth and steady decline, reaching its lowest value of <bold>0.0289</bold> at epoch <bold>35</bold>, indicating strong generalization ability. The training loss followed a similar downward trajectory.</p>
</sec>
<sec id="s8"><label>8</label><title>Neural network embedded implementation</title>
<sec id="s8a"><label>8.1</label><title>Model quantization</title>
<p>To deploy the neural network efficiently on the STM32H7 microcontroller, the model underwent a structured quantization pipeline. The objective of this process was to reduce the memory footprint and computational overhead while maintaining classification accuracy. The trained model was first exported from PyTorch format to the ONNX format, ensuring compatibility with embedded AI deployment tools. The ONNX model preprocessing functionality was then used to simulate inference behavior and define the required calibration parameters.</p>
<p>Quantization was performed using static post-training quantization, converting floating-point computations into fixed-point arithmetic. This transformation involved mapping all network operations, including convolutions and fully connected layers, to integer precision (<monospace>int8</monospace>). The model was statically quantized to ONNX&#x005F;Q8 format, applying affine transformations to both activations and weights. This reduced the model size from 123 KB in floating-point precision to 48 KB, achieving a compression ratio of 60.9&#x0025;. Parameter details of the quantization process are listed in <xref ref-type="table" rid="T5">Table 5</xref>.</p>
<p>The static quantization process relies on affine integer transformations to scale floating-point values into an 8-bit fixed-point range. Quantization of inputs and outputs follows the standard QLinear transformation described by <xref ref-type="disp-formula" rid="disp-formula1">Equations 1</xref>, <xref ref-type="disp-formula" rid="disp-formula2">2</xref>: <disp-formula id="disp-formula1"><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="DM1"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mtext>int8</mml:mtext></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mtext>round</mml:mtext><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mfrac><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mtext>float32</mml:mtext></mml:mrow></mml:msub><mml:msub><mml:mi>S</mml:mi><mml:mi>X</mml:mi></mml:msub></mml:mfrac></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>+</mml:mo><mml:msub><mml:mi>Z</mml:mi><mml:mi>X</mml:mi></mml:msub></mml:math><label>(1)</label></disp-formula><disp-formula id="disp-formula2"><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="DM2"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mtext>float32</mml:mtext></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mi>S</mml:mi><mml:mi>X</mml:mi></mml:msub><mml:mo>&#x00D7;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mtext>int8</mml:mtext></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>Z</mml:mi><mml:mi>X</mml:mi></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:math><label>(2)</label></disp-formula>where <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM40"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mtext>float32</mml:mtext></mml:mrow></mml:msub></mml:math></inline-formula> represents the original floating-point input, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM41"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mtext>int8</mml:mtext></mml:mrow></mml:msub></mml:math></inline-formula> is 
the quantized integer representation, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM42"><mml:msub><mml:mi>S</mml:mi><mml:mi>X</mml:mi></mml:msub></mml:math></inline-formula> is the scale factor, and <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM43"><mml:msub><mml:mi>Z</mml:mi><mml:mi>X</mml:mi></mml:msub></mml:math></inline-formula> is the zero-point offset. This mapping allows all matrix multiplications and convolutions to be performed using integer arithmetic, significantly reducing computational complexity.</p>
<p>Substituting the calibrated parameters into <xref ref-type="disp-formula" rid="disp-formula1">Equations 1</xref>, <xref ref-type="disp-formula" rid="disp-formula2">2</xref>, the quantization for the ECG model becomes <xref ref-type="disp-formula" rid="disp-formula3">Equations 3</xref>, <xref ref-type="disp-formula" rid="disp-formula4">4</xref>: <disp-formula id="disp-formula3"><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="DM3"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mtext>int8</mml:mtext></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mtext>round</mml:mtext><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mfrac><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mtext>float32</mml:mtext></mml:mrow></mml:msub><mml:mn>0.2578</mml:mn></mml:mfrac></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mn>50</mml:mn></mml:math><label>(3)</label></disp-formula><disp-formula id="disp-formula4"><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="DM4"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mtext>float32</mml:mtext></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mn>0.0039</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mtext>int8</mml:mtext></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:mn>128</mml:mn><mml:mo stretchy="false">)</mml:mo></mml:math><label>(4)</label></disp-formula></p>
</sec>
<sec id="s8b"><label>8.2</label><title>Quantized model performance</title>
<p>Quantization did not adversely impact the model&#x2019;s classification performance. The 8-bit integer quantized model achieved accuracy, precision, recall, and F1-score metrics equivalent to those of the original 32-bit floating-point PyTorch model within the expected margin of evaluation variability. These results confirmed that efficient model deployment on resource-constrained embedded hardware was possible without compromising predictive performance.</p>
<p>As shown in <xref ref-type="fig" rid="F5">Figure&#x00A0;5</xref>, while convolutional layers dominated in raw MAC operations, pooling layers exhibited a disproportionately high actual Multiply-Accumulate (MAC) cost (c&#x005F;macc), with MaxPool<inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM44"><mml:msub><mml:mi></mml:mi><mml:mn>1</mml:mn></mml:msub></mml:math></inline-formula> alone accounting for 63.6&#x0025; of total computational complexity despite performing a negligible number of MAC operations. This behavior was attributed to memory-bound inefficiencies, where pooling operations require multiple unoptimized memory fetches, strided indexing, and cache-inefficient comparisons [<xref ref-type="bibr" rid="B56">56</xref>]. The dual <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM45"><mml:mi>X</mml:mi></mml:math></inline-formula>-axis visualization distinguished between pure arithmetic intensity (log scale) and real-world execution cost (linear scale), revealing that memory overhead outweighed pure computation in embedded AI inference.</p>
<fig id="F5" position="float"><label>Figure&#x00A0;5</label>
<caption><p>Operation complexity breakdown of the quantized ECG model on STM32H7. This figure presents a layer-wise analysis of computational complexity, comparing the number of Multiply-Accumulate (MAC) operations (blue bars) with the actual computational cost (c&#x005F;macc contribution, red bars), which accounts for memory access and processing overhead.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-07-1626279-g005.tif"><alt-text content-type="machine-generated">Bar chart displaying contributions of different operations to c_macc. MaxPool_3, MaxPool_2, MaxPool_1, Conv2D_4, Conv2D_3, Conv2D_2, and Conv2D_1 are labeled. MaxPool_3 has 33, MaxPool_2 has 2,080, and MaxPool_1 has 1,216 units of c_macc contribution. Conv2D_1 contributes the most MAC operations with 1,262,016. The x-axis is on a logarithmic scale, with separate breakdowns for c_macc contribution and MAC operations visually distinguished.</alt-text>
</graphic>
</fig>
<p>While convolutional layers benefited from SIMD acceleration and optimized matrix-multiplication kernels on the STM32H7 platform [<xref ref-type="bibr" rid="B56">56</xref>], pooling operations lacked comparable parallelization support, resulting in execution bottlenecks. These observations were consistent with prior work emphasizing the importance of memory access patterns in constrained environments [<xref ref-type="bibr" rid="B56">56</xref>].</p>
<p>Based on these findings, stride-based convolutions or average pooling operations could offer more efficient alternatives to max-pooling in resource-limited deployments, consistent with strategies used in lightweight architectures such as MobileNet [<xref ref-type="bibr" rid="B57">57</xref>].</p>
<p>The execution efficiency of deep learning models on embedded systems was evaluated using the timeline shown in <xref ref-type="fig" rid="F6">Figure&#x00A0;6</xref>, which presents execution time across five computational stages, measured in both milliseconds and CPU cycle counts. The stages included downsampling, standardization, first inference, weights reload, and second inference. Analysis of this timeline enabled identification of computational bottlenecks, memory access overhead, and the real-time implications of quantized inference.</p>
<fig id="F6" position="float"><label>Figure&#x00A0;6</label>
<caption><p>Embedded neural network inference timeline, showing execution stages in both milliseconds (ms) and CPU cycle counts. The processing pipeline is divided into five stages: (1) downsampling, (2) standardization, (3) first inference (signal quality assessment), (4) weights reload, and (5) second inference (biometric classification). The total execution time of approximately 1.35&#x2009;s ensures real-time processing for 5-s ECG data blocks.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-07-1626279-g006.tif"><alt-text content-type="machine-generated">Timeline diagram titled "Neural Network Embedded Inference Timeline" showing computation stages over time. Stage 1: 0.32 ms, 20800 cycles. Stage 2: 2.50 ms, 160000 cycles. Stage 3: 674.59 ms, 43200000 cycles in red. Stage 4: 7.67 ms, 491000 cycles. Time in milliseconds and CPU cycles are indicated along the axes.</alt-text>
</graphic>
</fig>
<p>The first inference stage, which required 674.59 ms, served as a signal quality assessment step. Based on its output, computation proceeded conditionally. If the signal quality was marked as sufficient for further processing, the system reloaded a new set of model weights (7.67 ms) and performed a second inference (674.58 ms). This conditional execution strategy enabled adaptive processing, allowing the system to adjust computational resources based on signal characteristics.</p>
<p>From a computational perspective, inference remained the most resource-intensive operation. The first and second inference stages together accounted for approximately 98&#x0025; of the total execution time. Preprocessing steps such as downsampling (0.32 ms) and standardization (2.49 ms) imposed relatively low computational demand, though standardization still introduced measurable overhead due to floating-point arithmetic.</p>
<p>A complementary analysis of CPU cycle counts confirmed the dominance of inference-related operations. Each inference stage required approximately 43,173,585 CPU cycles. The weights reload stage, despite its short execution time, incurred 490,852 CPU cycles due to memory access operations. The standardization step consumed 159,898 cycles, compared to only 20,776 cycles for downsampling, suggesting that preprocessing contributed non-negligibly to overall latency.</p>
<p>Given that the system processed each 5-s ECG block in approximately 1.35&#x2009;s, the implementation satisfied the real-time constraint of completing inference within the acquisition window. The available processing margin ensured that the system remained responsive and capable of continuous operation, leaving headroom for additional tasks such as data storage or transmission.</p>
</sec>
</sec>
<sec id="s9"><label>9</label><title>Baseline comparisons</title>
<p><xref ref-type="table" rid="T6">Table&#x00A0;6</xref> summarizes representative studies on ECG-based biometric identification and embedded ECG classification, focusing on reported performance, model size, and computational complexity. Deep learning approaches such as those by Agrawal et al. [<xref ref-type="bibr" rid="B39">39</xref>] and Mangold et al. [<xref ref-type="bibr" rid="B47">47</xref>] demonstrated the strong discriminative potential of ECG morphology for user identification, achieving accuracies exceeding 98&#x0025;. However, these works relied on high-capacity neural networks trained and evaluated in GPU or cloud environments, without addressing the constraints of wearable or resource-limited devices.</p>
<table-wrap id="T6" position="float"><label>Table&#x00A0;6</label>
<caption><p>Comparison of related ECG-based biometric and embedded learning studies in terms of accuracy, model size, and computational complexity.</p></caption>
<table>
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th valign="top" align="left">Study</th>
<th valign="top" align="center">Dataset/subjects</th>
<th valign="top" align="center">Task type</th>
<th valign="top" align="center">Performance</th>
<th valign="top" align="center">Model size/parameters</th>
<th valign="top" align="center">Computational complexity/platform</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Agrawal et al. [<xref ref-type="bibr" rid="B39">39</xref>]</td>
<td valign="top" align="left">PTB Diagnostic ECG Database, multi-subject authentication</td>
<td valign="top" align="left">User authentication</td>
<td valign="top" align="left">Accuracy 98.34&#x0025; (CNN), 99.69&#x0025; (LSTM)</td>
<td valign="top" align="left">Not reported</td>
<td valign="top" align="left">Standard GPU inference; cloud environment</td>
</tr>
<tr>
<td valign="top" align="left">Wang et al. [<xref ref-type="bibr" rid="B50">50</xref>]</td>
<td valign="top" align="left">PTB-DB (290 subjects), MIT-BIH, ECG-ID</td>
<td valign="top" align="left">Authentication (self-supervised)</td>
<td valign="top" align="left">Accuracy 99.1&#x0025; (PTB), <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM46"><mml:mo>&#x2264;</mml:mo></mml:math></inline-formula>&#x2009;98.5&#x0025; on external datasets</td>
<td valign="top" align="left">Not stated; 8-bit quantized/pruned</td>
<td valign="top" align="left">Cortex-M4F microcontroller; <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM47"><mml:mo>&#x2248;</mml:mo></mml:math></inline-formula>&#x2009;37&#x0025; reduction in compute cost</td>
</tr>
<tr>
<td valign="top" align="left">Mangold et al. [<xref ref-type="bibr" rid="B47">47</xref>]</td>
<td valign="top" align="left"><inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM48"><mml:mo>&#x223C;</mml:mo></mml:math></inline-formula>100,000 patients, 970,000 ECGs</td>
<td valign="top" align="left">Biometric identification (Siamese NN)</td>
<td valign="top" align="left">0.97 AUC</td>
<td valign="top" align="left">Large-scale model (not specified)</td>
<td valign="top" align="left">GPU/cloud environment; long-term ECG fingerprints</td>
</tr>
<tr>
<td valign="top" align="left">Raj and Ray [<xref ref-type="bibr" rid="B48">48</xref>]</td>
<td valign="top" align="left">Real-time embedded ECG monitor &#x0026; MIT-BIH</td>
<td valign="top" align="left">Arrhythmia detection</td>
<td valign="top" align="left">Accuracy 96.14&#x0025;</td>
<td valign="top" align="left">DSP pipeline &#x0026; TSVM</td>
<td valign="top" align="left">Low-power MCU; handcrafted feature extraction</td>
</tr>
<tr>
<td valign="top" align="left">This work</td>
<td valign="top" align="left">PTB-DB, single-lead (aVL), Subject 180 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM49"><mml:mo>+</mml:mo></mml:math></inline-formula> 4 others</td>
<td valign="top" align="left">Personalized biometric verification</td>
<td valign="top" align="left">94.68&#x0025; accuracy, F1 <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM50"><mml:mo>=</mml:mo></mml:math></inline-formula> 94.51&#x0025;, AUC <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM51"><mml:mo>=</mml:mo></mml:math></inline-formula> 0.9986</td>
<td valign="top" align="left">123 kB (float32) <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM52"><mml:mrow><mml:mo mathvariant="bold" stretchy="false">&#x2192;</mml:mo></mml:mrow></mml:math></inline-formula> 48 kB (int8)</td>
<td valign="top" align="left">1.35&#x2009;s total inference on STM32H7 (Cortex-M7); real-time on-device</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Recent progress has begun to shift toward edge-oriented and lightweight implementations. Wang et al. [<xref ref-type="bibr" rid="B50">50</xref>] presented a self-supervised ECG authentication framework optimized for IoT edge sensors, employing model pruning and 8-bit quantization to reduce computational cost by approximately 37&#x0025; on a Cortex-M4F microcontroller, while maintaining near 99&#x0025; accuracy on the PTB database. Similarly, Raj and Ray [<xref ref-type="bibr" rid="B48">48</xref>] demonstrated real-time ECG processing on a low-power microcontroller using classical DSP and machine learning techniques.</p>
<p>In contrast, the present work represents a fully embedded and personalized ECG biometric system capable of performing both signal-quality assessment and identity verification in real time on an STM32H7 microcontroller. Despite its compact memory footprint of only 48&#x2009;kB after 8-bit quantization, the proposed convolutional neural network achieved 94.68&#x0025; classification accuracy and an F1-score of 94.51&#x0025;, with a total inference time of approximately 1.35&#x2009;s per 5&#x2009;s ECG segment.</p>
</sec>
<sec id="s10"><label>10</label><title>Conclusion and future work</title>
<p>This study demonstrated the feasibility of deploying real-time neural network inference for biometric ECG classification directly on a resource-constrained Holter device. By integrating a quantized convolutional neural network onto an STM32H7 microcontroller and employing a two-stage inference pipeline with signal-quality assessment, the system achieves real-time performance with efficient computational resource usage. The implementation confirms that low-power wearable devices can support continuous ECG biometrics with reduced reliance on cloud infrastructure.</p>
<p>While the achieved results validate the technical viability of personalized on-device ECG classification, the findings also highlight the importance of broader data diversity for further refinement. Recordings acquired under similar conditions (e.g., resting or supine posture) may exhibit high morphological consistency, potentially limiting model generalization. Future work will therefore focus on longitudinal data collection across multiple postures, daily activities, and physiological states. This will enable the model to learn invariant, subject-specific cardiac representations and adapt to gradual physiological changes, reducing overfitting while expanding applicability toward early detection of pathological trends. Ultimately, the proposed framework represents a stepping stone toward fully personalized, self-contained, and adaptive cardiac monitoring in everyday life.</p>
</sec>
</body>
<back>
<sec id="s11" sec-type="data-availability"><title>Data availability statement</title>
<p>The original contributions presented in the study are included in the article/Supplementary Material, further inquiries can be directed to the corresponding author/s.</p>
</sec>
<sec id="s12" sec-type="ethics-statement"><title>Ethics statement</title>
<p>Ethical approval was not required for the study involving humans in accordance with the local legislation and institutional requirements. Written informed consent to participate in this study was not required from the participants or the participants&#x2019; legal guardians/next of kin in accordance with the national legislation and the institutional requirements.</p>
</sec>
<sec id="s13" sec-type="author-contributions"><title>Author contributions</title>
<p>MB: Software, Writing &#x2013; original draft, Conceptualization, Visualization, Methodology, Data curation. AM: Methodology, Writing &#x2013; original draft, Supervision, Writing &#x2013; review &#x0026; editing, Software, Data curation, Validation, Conceptualization. MM: Resources, Project administration, Conceptualization, Writing &#x2013; review &#x0026; editing, Funding acquisition, Supervision. EV: Supervision, Writing &#x2013; review &#x0026; editing, Conceptualization, Formal analysis, Methodology. KG: Writing &#x2013; review &#x0026; editing, Data curation, Methodology, Software. LC: Supervision, Writing &#x2013; review &#x0026; editing, Software.</p>
</sec>
<sec id="s15" sec-type="COI-statement"><title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s16" sec-type="ai-statement"><title>Generative AI statement</title>
<p>The author(s) declared that generative AI was used in the creation of this manuscript. In preparing this manuscript, we utilized generative AI tools to assist with text stylization, grammar correction, and translation into English.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec id="s17" sec-type="disclaimer"><title>Publisher&#x0027;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list><title>References</title>
<ref id="B1"><label>1.</label><mixed-citation publication-type="other"><collab>World Heart Federation</collab>. <comment><italic>World heart report 2023: Full report</italic> (2023). Available online at: <ext-link ext-link-type="uri" xlink:href="https://world-heart-federation.org/resource/world-heart-report-2023/">https://world-heart-federation.org/resource/world-heart-report-2023/</ext-link> (Accessed March 3, 2025)</comment>.</mixed-citation></ref>
<ref id="B2"><label>2.</label><mixed-citation publication-type="other"><collab>Eurostat</collab>. <comment><italic>Cardiovascular diseases statistics</italic> (2024). Available online at: <ext-link ext-link-type="uri" xlink:href="https://ec.europa.eu/eurostat/statistics-explained/index.php?title=Cardiovascular_diseases_statistics">https://ec.europa.eu/eurostat/statistics-explained/index.php&#x003F;title&#x003D;Cardiovascular&#x005F;diseases&#x005F;statistics</ext-link> (Accessed March 3, 2025)</comment>.</mixed-citation></ref>
<ref id="B3"><label>3.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Anderson</surname> <given-names>E</given-names></name> <name><surname>Durstine</surname> <given-names>JL</given-names></name></person-group>. <article-title>Physical activity, exercise, and chronic diseases: a brief review</article-title>. <source>Sports Med Health Sci</source>. (<year>2019</year>) <volume>1</volume>:<fpage>3</fpage>&#x2013;<lpage>10</lpage>. <pub-id pub-id-type="doi">10.1016/j.smhs.2019.08.006</pub-id><pub-id pub-id-type="pmid">35782456</pub-id></mixed-citation></ref>
<ref id="B4"><label>4.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Iglecias</surname> <given-names>F</given-names></name> <name><surname>Gomez-Guzman</surname> <given-names>M</given-names></name> <name><surname>Valverde-Merino</surname> <given-names>M</given-names></name> <name><surname>Piquer-Martinez</surname> <given-names>C</given-names></name> <name><surname>Zarzuelo</surname> <given-names>M</given-names></name></person-group>. <article-title>Promoting self-care and improving quality of life in cardiovascular disease patients: the role of pharmacist-led interventions in community pharmacies</article-title>. <source>Res Soc Admin Pharm</source>. (<year>2025</year>) <volume>21</volume>:<fpage>172</fpage>&#x2013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.1016/j.sapharm.2024.12.003</pub-id></mixed-citation></ref>
<ref id="B5"><label>5.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Mond</surname> <given-names>HG</given-names></name></person-group>. <article-title>The spectrum of ambulatory electrocardiographic monitoring</article-title>. <source>Heart Lung Circ</source>. (<year>2017</year>) <volume>26</volume>:<fpage>1160</fpage>&#x2013;<lpage>74</lpage>. <pub-id pub-id-type="doi">10.1016/j.hlc.2017.02.034</pub-id><pub-id pub-id-type="pmid">28487061</pub-id></mixed-citation></ref>
<ref id="B6"><label>6.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Joglar</surname> <given-names>JA</given-names></name> <name><surname>Chung</surname> <given-names>MK</given-names></name> <name><surname>Armbruster</surname> <given-names>AL</given-names></name> <name><surname>Benjamin</surname> <given-names>EJ</given-names></name> <name><surname>Chyou</surname> <given-names>JY</given-names></name> <name><surname>Cronin</surname> <given-names>EM</given-names></name></person-group>, et al. <article-title>2023 ACC/AHA/ACCP/HRS guideline for the diagnosis and management of atrial fibrillation: a report of the American college of cardiology/American heart association joint committee on clinical practice guidelines</article-title>. <source>Circulation</source>. (<year>2024</year>) <volume>149</volume>:<fpage>1</fpage>&#x2013;<lpage>156</lpage>. <pub-id pub-id-type="doi">10.1161/CIR.0000000000001193</pub-id></mixed-citation></ref>
<ref id="B7"><label>7.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Gibson</surname> <given-names>CM</given-names></name> <name><surname>Ciaglo</surname> <given-names>LN</given-names></name> <name><surname>Southard</surname> <given-names>MC</given-names></name> <name><surname>Takao</surname> <given-names>S</given-names></name> <name><surname>Harrigan</surname> <given-names>C</given-names></name> <name><surname>Lewis</surname> <given-names>J</given-names></name></person-group>, et al. <article-title>Diagnostic and prognostic value of ambulatory ECG (holter) monitoring in patients with coronary heart disease: a review</article-title>. <source>J Thromb Thrombolysis</source>. (<year>2007</year>) <volume>23</volume>:<fpage>135</fpage>&#x2013;<lpage>45</lpage>. <pub-id pub-id-type="doi">10.1007/s11239-006-9015-6</pub-id><pub-id pub-id-type="pmid">17221332</pub-id></mixed-citation></ref>
<ref id="B8"><label>8.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Vodi&#x010D;ka</surname> <given-names>S</given-names></name> <name><surname>Naji</surname> <given-names>HF</given-names></name> <name><surname>Zelko</surname> <given-names>E</given-names></name></person-group>. <article-title>The role of telecardiology in dealing with patients with cardiac rhythm disorders in family medicine&#x2014;systematic review</article-title>. <source>Zdr Varst</source>. (<year>2020</year>) <volume>59</volume>:<fpage>108</fpage>&#x2013;<lpage>16</lpage>. <pub-id pub-id-type="doi">10.2478/sjph-2020-0014</pub-id></mixed-citation></ref>
<ref id="B9"><label>9.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Carrington</surname> <given-names>M</given-names></name> <name><surname>Provid&#x00EA;ncia</surname> <given-names>R</given-names></name> <name><surname>Chahal</surname> <given-names>CAA</given-names></name> <name><surname>Ricci</surname> <given-names>F</given-names></name> <name><surname>Epstein</surname> <given-names>AE</given-names></name> <name><surname>Gallina</surname> <given-names>S</given-names></name></person-group>, et al. <article-title>Monitoring and diagnosis of intermittent arrhythmias: evidence-based guidance and role of novel monitoring strategies</article-title>. <source>Eur Heart J Open</source>. (<year>2022</year>) <volume>2</volume>:<fpage>oeac072</fpage>. <pub-id pub-id-type="doi">10.1093/ehjopen/oeac072</pub-id><pub-id pub-id-type="pmid">36440351</pub-id></mixed-citation></ref>
<ref id="B10"><label>10.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Goldberger</surname> <given-names>AL</given-names></name> <name><surname>Amaral</surname> <given-names>LA</given-names></name> <name><surname>Glass</surname> <given-names>L</given-names></name> <name><surname>Hausdorff</surname> <given-names>JM</given-names></name> <name><surname>Ivanov</surname> <given-names>PC</given-names></name> <name><surname>Mark</surname> <given-names>RG</given-names></name></person-group>, et al. <article-title>PhysioBank, PhysioToolkit, and PhysioNet: components of a new research resource for complex physiologic signals</article-title>. <source>Circulation</source>. (<year>2000</year>) <volume>101</volume>:<fpage>E215</fpage>&#x2013;<lpage>20</lpage>. <pub-id pub-id-type="doi">10.1161/01.cir.101.23.e215</pub-id><pub-id pub-id-type="pmid">10851218</pub-id></mixed-citation></ref>
<ref id="B11"><label>11.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Minchol&#x00E9;</surname> <given-names>A</given-names></name> <name><surname>Rodriguez</surname> <given-names>B</given-names></name></person-group>. <article-title>Artificial intelligence for the electrocardiogram</article-title>. <source>Nat Med</source>. (<year>2019</year>) <volume>25</volume>:<fpage>22</fpage>&#x2013;<lpage>3</lpage>. <pub-id pub-id-type="doi">10.1038/s41591-018-0306-1</pub-id></mixed-citation></ref>
<ref id="B12"><label>12.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Deserno</surname> <given-names>TM</given-names></name> <name><surname>Marx</surname> <given-names>N</given-names></name></person-group>. <article-title>Computational electrocardiography: revisiting holter ECG monitoring</article-title>. <source>Methods Inf Med</source>. (<year>2016</year>) <volume>55</volume>:<fpage>305</fpage>&#x2013;<lpage>11</lpage>. <pub-id pub-id-type="doi">10.3414/me15-05-0009</pub-id><pub-id pub-id-type="pmid">27406338</pub-id></mixed-citation></ref>
<ref id="B13"><label>13.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Duncker</surname> <given-names>D</given-names></name> <name><surname>Ding</surname> <given-names>WY</given-names></name> <name><surname>Etheridge</surname> <given-names>S</given-names></name> <name><surname>Noseworthy</surname> <given-names>PA</given-names></name> <name><surname>Veltmann</surname> <given-names>C</given-names></name> <name><surname>Yao</surname> <given-names>X</given-names></name></person-group>, et al. <article-title>Smart wearables for cardiac monitoring-real-world use beyond atrial fibrillation</article-title>. <source>Sensors (Basel)</source>. (<year>2021</year>) <volume>21</volume>:<fpage>2539</fpage>. <pub-id pub-id-type="doi">10.3390/s21072539</pub-id><pub-id pub-id-type="pmid">33916371</pub-id></mixed-citation></ref>
<ref id="B14"><label>14.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Sana</surname> <given-names>F</given-names></name> <name><surname>Isselbacher</surname> <given-names>EM</given-names></name> <name><surname>Singh</surname> <given-names>JP</given-names></name> <name><surname>Heist</surname> <given-names>EK</given-names></name> <name><surname>Pathik</surname> <given-names>B</given-names></name> <name><surname>Armoundas</surname> <given-names>AA</given-names></name></person-group>. <article-title>Wearable devices for ambulatory cardiac monitoring: JACC state-of-the-art review</article-title>. <source>J Am Coll Cardiol</source>. (<year>2020</year>) <volume>75</volume>:<fpage>1582</fpage>&#x2013;<lpage>92</lpage>. <pub-id pub-id-type="doi">10.1016/j.jacc.2020.01.046</pub-id><pub-id pub-id-type="pmid">32241375</pub-id></mixed-citation></ref>
<ref id="B15"><label>15.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Gautam</surname> <given-names>N</given-names></name> <name><surname>Ghanta</surname> <given-names>SN</given-names></name> <name><surname>Mueller</surname> <given-names>J</given-names></name> <name><surname>Mansour</surname> <given-names>M</given-names></name> <name><surname>Chen</surname> <given-names>Z</given-names></name> <name><surname>Puente</surname> <given-names>C</given-names></name></person-group>, et al. <article-title>Artificial intelligence, wearables and remote monitoring for heart failure: current and future applications</article-title>. <source>Diagnostics (Basel)</source>. (<year>2022</year>) <volume>12</volume>:<fpage>2964</fpage>. <pub-id pub-id-type="doi">10.3390/diagnostics12122964</pub-id><pub-id pub-id-type="pmid">36552971</pub-id></mixed-citation></ref>
<ref id="B16"><label>16.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Serhani</surname> <given-names>MA</given-names></name> <name><surname>T El Kassabi</surname> <given-names>H</given-names></name> <name><surname>Ismail</surname> <given-names>H</given-names></name> <name><surname>Nujum Navaz</surname> <given-names>A</given-names></name></person-group>. <article-title>ECG monitoring systems: review, architecture, processes, and key challenges</article-title>. <source>Sensors (Basel)</source>. (<year>2020</year>) <volume>20</volume>:<fpage>1796</fpage>. <pub-id pub-id-type="doi">10.3390/s20061796</pub-id><pub-id pub-id-type="pmid">32213969</pub-id></mixed-citation></ref>
<ref id="B17"><label>17.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Hsieh</surname> <given-names>J-C</given-names></name> <name><surname>Li</surname> <given-names>A-H</given-names></name> <name><surname>Yang</surname> <given-names>C-C</given-names></name></person-group>. <article-title>Mobile, cloud, and big data computing: contributions, challenges, and new directions in telecardiology</article-title>. <source>Int J Environ Res Public Health</source>. (<year>2013</year>) <volume>10</volume>:<fpage>6131</fpage>&#x2013;<lpage>53</lpage>. <pub-id pub-id-type="doi">10.3390/ijerph10116131</pub-id><pub-id pub-id-type="pmid">24232290</pub-id></mixed-citation></ref>
<ref id="B18"><label>18.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>Y</given-names></name> <name><surname>Tran</surname> <given-names>P</given-names></name> <name><surname>Wojtusiak</surname> <given-names>J</given-names></name></person-group>. <comment>From wearable device to OpenEMR: 5G edge centered telemedicine and decision support system. In: <italic>Proceedings of the 15th International Joint Conference on Biomedical Engineering Systems and Technologies</italic>. SCITEPRESS - Science and Technology Publications (2022)</comment>.</mixed-citation></ref>
<ref id="B19"><label>19.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Jimma</surname> <given-names>BL</given-names></name></person-group>. <article-title>Artificial intelligence in healthcare: a bibliometric analysis</article-title>. <source>Telemat Inf Rep</source>. (<year>2023</year>) <volume>9</volume>:<fpage>100041</fpage>. <pub-id pub-id-type="doi">10.1016/j.teler.2023.100041</pub-id></mixed-citation></ref>
<ref id="B20"><label>20.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kumar</surname> <given-names>P</given-names></name> <name><surname>Chauhan</surname> <given-names>S</given-names></name> <name><surname>Awasthi</surname> <given-names>LK</given-names></name></person-group>. <article-title>Artificial intelligence in healthcare: review, ethics, trust challenges &#x0026; future research directions</article-title>. <source>Eng Appl Artif Intell</source>. (<year>2023</year>) <volume>120</volume>:<fpage>105894</fpage>. <pub-id pub-id-type="doi">10.1016/j.engappai.2023.105894</pub-id></mixed-citation></ref>
<ref id="B21"><label>21.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Roppelt</surname> <given-names>JS</given-names></name> <name><surname>Kanbach</surname> <given-names>DK</given-names></name> <name><surname>Kraus</surname> <given-names>S</given-names></name></person-group>. <article-title>Artificial intelligence in healthcare institutions: a systematic literature review on influencing factors</article-title>. <source>Technol Soc</source>. (<year>2024</year>) <volume>76</volume>:<fpage>102443</fpage>. <pub-id pub-id-type="doi">10.1016/j.techsoc.2023.102443</pub-id></mixed-citation></ref>
<ref id="B22"><label>22.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Al-Zaiti</surname> <given-names>SS</given-names></name> <name><surname>Martin-Gill</surname> <given-names>C</given-names></name> <name><surname>Z&#x00E8;gre-Hemsey</surname> <given-names>JK</given-names></name> <name><surname>Bouzid</surname> <given-names>Z</given-names></name> <name><surname>Faramand</surname> <given-names>Z</given-names></name> <name><surname>Alrawashdeh</surname> <given-names>MO</given-names></name></person-group>, et al. <article-title>Machine learning for ECG diagnosis and risk stratification of occlusion myocardial infarction</article-title>. <source>Nat Med</source>. (<year>2023</year>) <volume>29</volume>:<fpage>1804</fpage>&#x2013;<lpage>13</lpage>. <pub-id pub-id-type="doi">10.1038/s41591-023-02396-3</pub-id><pub-id pub-id-type="pmid">37386246</pub-id></mixed-citation></ref>
<ref id="B23"><label>23.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Boulif</surname> <given-names>A</given-names></name> <name><surname>Ananou</surname> <given-names>B</given-names></name> <name><surname>Ouladsine</surname> <given-names>M</given-names></name> <name><surname>Delliaux</surname> <given-names>S</given-names></name></person-group>. <article-title>A literature review: ECG-based models for arrhythmia diagnosis using artificial intelligence techniques</article-title>. <source>Bioinform Biol Insights</source>. (<year>2023</year>) <volume>17</volume>:<fpage>11779322221149600</fpage>. <pub-id pub-id-type="doi">10.1177/11779322221149600</pub-id><pub-id pub-id-type="pmid">36798080</pub-id></mixed-citation></ref>
<ref id="B24"><label>24.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Cho</surname> <given-names>Y</given-names></name> <name><surname>Kwon</surname> <given-names>J-M</given-names></name> <name><surname>Kim</surname> <given-names>K-H</given-names></name> <name><surname>Medina-Inojosa</surname> <given-names>JR</given-names></name> <name><surname>Jeon</surname> <given-names>K-H</given-names></name> <name><surname>Cho</surname> <given-names>S</given-names></name></person-group>, et al. <article-title>Artificial intelligence algorithm for detecting myocardial infarction using six-lead electrocardiography</article-title>. <source>Sci Rep</source>. (<year>2020</year>) <volume>10</volume>:<fpage>20495</fpage>. <pub-id pub-id-type="doi">10.1038/s41598-020-77599-6</pub-id><pub-id pub-id-type="pmid">33235279</pub-id></mixed-citation></ref>
<ref id="B25"><label>25.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>J</given-names></name> <name><surname>Li</surname> <given-names>Z</given-names></name> <name><surname>Jin</surname> <given-names>Y</given-names></name> <name><surname>Liu</surname> <given-names>Y</given-names></name> <name><surname>Liu</surname> <given-names>C</given-names></name> <name><surname>Zhao</surname> <given-names>L</given-names></name></person-group>, et al. <article-title>A review of arrhythmia detection based on electrocardiogram with artificial intelligence</article-title>. <source>Expert Rev Med Devices</source>. (<year>2022</year>) <volume>19</volume>:<fpage>549</fpage>&#x2013;<lpage>60</lpage>. <pub-id pub-id-type="doi">10.1080/17434440.2022.2115887</pub-id></mixed-citation></ref>
<ref id="B26"><label>26.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Nagarajan</surname> <given-names>VD</given-names></name> <name><surname>Lee</surname> <given-names>S-L</given-names></name> <name><surname>Robertus</surname> <given-names>J-L</given-names></name> <name><surname>Nienaber</surname> <given-names>CA</given-names></name> <name><surname>Trayanova</surname> <given-names>NA</given-names></name> <name><surname>Ernst</surname> <given-names>S</given-names></name></person-group>. <article-title>Artificial intelligence in the diagnosis and management of arrhythmias</article-title>. <source>Eur Heart J</source>. (<year>2021</year>) <volume>42</volume>:<fpage>3904</fpage>&#x2013;<lpage>16</lpage>. <pub-id pub-id-type="doi">10.1093/eurheartj/ehab544</pub-id><pub-id pub-id-type="pmid">34392353</pub-id></mixed-citation></ref>
<ref id="B27"><label>27.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Xiong</surname> <given-names>P</given-names></name> <name><surname>Lee</surname> <given-names>SM-Y</given-names></name> <name><surname>Chan</surname> <given-names>G</given-names></name></person-group>. <article-title>Deep learning for detecting and locating myocardial infarction by electrocardiogram: a literature review</article-title>. <source>Front Cardiovasc Med</source>. (<year>2022</year>) <volume>9</volume>:<fpage>860032</fpage>. <pub-id pub-id-type="doi">10.3389/fcvm.2022.860032</pub-id><pub-id pub-id-type="pmid">35402563</pub-id></mixed-citation></ref>
<ref id="B28"><label>28.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Rhmann</surname> <given-names>W</given-names></name> <name><surname>Khan</surname> <given-names>J</given-names></name> <name><surname>Khan</surname> <given-names>GA</given-names></name> <name><surname>Ashraf</surname> <given-names>Z</given-names></name> <name><surname>Pandey</surname> <given-names>B</given-names></name> <name><surname>Khan</surname> <given-names>MA</given-names></name></person-group>, et al. <article-title>Comparative study of IoT- and AI-based computing disease detection approaches</article-title>. <source>Data Sci Manag</source>. (<year>2025</year>) <volume>8</volume>:<fpage>94</fpage>&#x2013;<lpage>106</lpage>. <pub-id pub-id-type="doi">10.1016/j.dsm.2024.07.004</pub-id></mixed-citation></ref>
<ref id="B29"><label>29.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Sachdeva</surname> <given-names>S</given-names></name> <name><surname>Bhatia</surname> <given-names>S</given-names></name> <name><surname>Al Harrasi</surname> <given-names>A</given-names></name> <name><surname>Shah</surname> <given-names>YA</given-names></name> <name><surname>Anwer</surname> <given-names>K</given-names></name> <name><surname>Philip</surname> <given-names>AK</given-names></name></person-group>, et al. <article-title>Unraveling the role of cloud computing in health care system and biomedical sciences</article-title>. <source>Heliyon</source>. (<year>2024</year>) <volume>10</volume>:<fpage>e29044</fpage>. <pub-id pub-id-type="doi">10.1016/j.heliyon.2024.e29044</pub-id><pub-id pub-id-type="pmid">38601602</pub-id></mixed-citation></ref>
<ref id="B30"><label>30.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Soni</surname> <given-names>D</given-names></name> <name><surname>Kumar</surname> <given-names>N</given-names></name></person-group>. <article-title>Machine learning techniques in emerging cloud computing integrated paradigms: a survey and taxonomy</article-title>. <source>J Netw Comput Appl</source>. (<year>2022</year>) <volume>205</volume>:<fpage>103419</fpage>. <pub-id pub-id-type="doi">10.1016/j.jnca.2022.103419</pub-id></mixed-citation></ref>
<ref id="B31"><label>31.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Bachtiger</surname> <given-names>P</given-names></name> <name><surname>Plymen</surname> <given-names>CM</given-names></name> <name><surname>Pabari</surname> <given-names>PA</given-names></name> <name><surname>Howard</surname> <given-names>JP</given-names></name> <name><surname>Whinnett</surname> <given-names>ZI</given-names></name> <name><surname>Opoku</surname> <given-names>F</given-names></name></person-group>, et al. <article-title>Artificial intelligence, data sensors and interconnectivity: future opportunities for heart failure</article-title>. <source>Card Fail Rev</source>. (<year>2020</year>) <volume>6</volume>:<fpage>e11</fpage>. <pub-id pub-id-type="doi">10.15420/cfr.2019.14</pub-id><pub-id pub-id-type="pmid">32514380</pub-id></mixed-citation></ref>
<ref id="B32"><label>32.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Ding</surname> <given-names>C</given-names></name> <name><surname>Yao</surname> <given-names>T</given-names></name> <name><surname>Wu</surname> <given-names>C</given-names></name> <name><surname>Ni</surname> <given-names>J</given-names></name></person-group>. <comment>Deep learning for personalized electrocardiogram diagnosis: a review (2024)</comment>.</mixed-citation></ref>
<ref id="B33"><label>33.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Hu</surname> <given-names>Y</given-names></name> <name><surname>Chen</surname> <given-names>J</given-names></name> <name><surname>Hu</surname> <given-names>L</given-names></name> <name><surname>Li</surname> <given-names>D</given-names></name> <name><surname>Yan</surname> <given-names>J</given-names></name> <name><surname>Ying</surname> <given-names>H</given-names></name></person-group>, et al. <comment>Personalized heart disease detection via ECG digital twin generation. In: <italic>Proceedings of the Thirty-Third International Joint Conference on Artificial Intelligence</italic>. California: International Joint Conferences on Artificial Intelligence Organization (2024)</comment>.</mixed-citation></ref>
<ref id="B34"><label>34.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Sau</surname> <given-names>A</given-names></name> <name><surname>Sieliwonczyk</surname> <given-names>E</given-names></name> <name><surname>Patlatzoglou</surname> <given-names>K</given-names></name> <name><surname>Pastika</surname> <given-names>L</given-names></name> <name><surname>McGurk</surname> <given-names>KA</given-names></name> <name><surname>Ribeiro</surname> <given-names>AH</given-names></name></person-group>, et al. <article-title>Artificial intelligence-enhanced electrocardiography for the identification of a sex-related cardiovascular risk continuum: a retrospective cohort study</article-title>. <source>Lancet Digit Health</source>. (<year>2025</year>) <volume>7</volume>:<fpage>e184</fpage>&#x2013;<lpage>94</lpage>. <pub-id pub-id-type="doi">10.1016/j.landig.2024.12.003</pub-id><pub-id pub-id-type="pmid">40015763</pub-id></mixed-citation></ref>
<ref id="B35"><label>35.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Adil</surname> <given-names>M</given-names></name> <name><surname>Farouk</surname> <given-names>A</given-names></name> <name><surname>Ali</surname> <given-names>A</given-names></name> <name><surname>Song</surname> <given-names>H</given-names></name> <name><surname>Jin</surname> <given-names>Z</given-names></name></person-group>. <article-title>Securing tomorrow of next-generation technologies with biometrics, state-of-the-art techniques, open challenges, and future research directions</article-title>. <source>Comput Sci Rev</source>. (<year>2025</year>) <volume>57</volume>:<fpage>100750</fpage>. <pub-id pub-id-type="doi">10.1016/j.cosrev.2025.100750</pub-id></mixed-citation></ref>
<ref id="B36"><label>36.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Yang</surname> <given-names>W</given-names></name> <name><surname>Wang</surname> <given-names>S</given-names></name> <name><surname>Sahri</surname> <given-names>NM</given-names></name> <name><surname>Karie</surname> <given-names>NM</given-names></name> <name><surname>Ahmed</surname> <given-names>M</given-names></name> <name><surname>Valli</surname> <given-names>C</given-names></name></person-group>. <article-title>Biometrics for internet-of-things security: a review</article-title>. <source>Sensors (Basel)</source>. (<year>2021</year>) <volume>21</volume>:<fpage>6163</fpage>. <pub-id pub-id-type="doi">10.3390/s21186163</pub-id><pub-id pub-id-type="pmid">34577370</pub-id></mixed-citation></ref>
<ref id="B37"><label>37.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Minchol&#x00E9;</surname> <given-names>A</given-names></name> <name><surname>Zacur</surname> <given-names>E</given-names></name> <name><surname>Ariga</surname> <given-names>R</given-names></name> <name><surname>Grau</surname> <given-names>V</given-names></name> <name><surname>Rodriguez</surname> <given-names>B</given-names></name></person-group>. <article-title>MRI-based computational torso/biventricular multiscale models to investigate the impact of anatomical variability on the ECG QRS complex</article-title>. <source>Front Physiol</source>. (<year>2019</year>) <volume>10</volume>:<fpage>1103</fpage>. <pub-id pub-id-type="doi">10.3389/fphys.2019.01103</pub-id></mixed-citation></ref>
<ref id="B38"><label>38.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>S&#x00F6;rnmo</surname> <given-names>L</given-names></name> <name><surname>Laguna</surname> <given-names>P</given-names></name></person-group>. <comment>Chapter 6&#x2014;The electrocardiogram&#x2014;a brief background. In: L. S&#x00F6;rnmo and P. Laguna, editors. <italic>Bioelectrical Signal Processing in Cardiac and Neurological Applications</italic>. Burlington: Academic Press. (2005). p. 411&#x2013;452</comment>.</mixed-citation></ref>
<ref id="B39"><label>39.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Agrawal</surname> <given-names>V</given-names></name> <name><surname>Hazratifard</surname> <given-names>M</given-names></name> <name><surname>Elmiligi</surname> <given-names>H</given-names></name> <name><surname>Gebali</surname> <given-names>F</given-names></name></person-group>. <article-title>Electrocardiogram (ECG)-based user authentication using deep learning algorithms</article-title>. <source>Diagnostics</source>. (<year>2023</year>) <volume>13</volume>:<fpage>439</fpage>. <pub-id pub-id-type="doi">10.3390/diagnostics13030439</pub-id><pub-id pub-id-type="pmid">36766544</pub-id></mixed-citation></ref>
<ref id="B40"><label>40.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>B&#x0131;&#x00E7;akc&#x0131;</surname> <given-names>HS</given-names></name> <name><surname>Santopietro</surname> <given-names>M</given-names></name> <name><surname>Guest</surname> <given-names>R</given-names></name></person-group>. <article-title>Activity-based electrocardiogram biometric verification using wearable devices</article-title>. <source>IET Biom</source>. (<year>2023</year>) <volume>12</volume>:<fpage>38</fpage>&#x2013;<lpage>51</lpage>. <pub-id pub-id-type="doi">10.1049/bme2.12105</pub-id></mixed-citation></ref>
<ref id="B41"><label>41.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ingale</surname> <given-names>M</given-names></name> <name><surname>Cordeiro</surname> <given-names>R</given-names></name> <name><surname>Thentu</surname> <given-names>S</given-names></name> <name><surname>Park</surname> <given-names>Y</given-names></name> <name><surname>Karimian</surname> <given-names>N</given-names></name></person-group>. <article-title>ECG biometric authentication: a comparative analysis</article-title>. <source>IEEE Access</source>. (<year>2020</year>) <volume>8</volume>:<fpage>117853</fpage>&#x2013;<lpage>66</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2020.3004464</pub-id></mixed-citation></ref>
<ref id="B42"><label>42.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Jaya Prakash</surname> <given-names>A</given-names></name> <name><surname>Patro</surname> <given-names>KK</given-names></name> <name><surname>Hammad</surname> <given-names>M</given-names></name> <name><surname>Tadeusiewicz</surname> <given-names>R</given-names></name> <name><surname>P&#x0142;awiak</surname> <given-names>P</given-names></name></person-group>. <article-title>BAED: a secured biometric authentication system using ECG signal based on deep learning techniques</article-title>. <source>Biocybern Biomed Eng</source>. (<year>2022</year>) <volume>42</volume>:<fpage>1081</fpage>&#x2013;<lpage>93</lpage>. <pub-id pub-id-type="doi">10.1016/j.bbe.2022.08.004</pub-id></mixed-citation></ref>
<ref id="B43"><label>43.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Labati</surname> <given-names>RD</given-names></name> <name><surname>Piuri</surname> <given-names>V</given-names></name> <name><surname>Sassi</surname> <given-names>R</given-names></name> <name><surname>Scotti</surname> <given-names>F</given-names></name> <name><surname>Sforza</surname> <given-names>G</given-names></name></person-group>. <comment>Adaptive ECG biometric recognition: a study on re-enrollment methods for QRS signals. In: <italic>2014 IEEE Symposium on Computational Intelligence in Biometrics and Identity Management (CIBIM)</italic>. IEEE (2014)</comment>.</mixed-citation></ref>
<ref id="B44"><label>44.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pereira</surname> <given-names>TMC</given-names></name> <name><surname>Concei&#x00E7;&#x00E3;o</surname> <given-names>RC</given-names></name> <name><surname>Sencadas</surname> <given-names>V</given-names></name> <name><surname>Sebasti&#x00E3;o</surname> <given-names>R</given-names></name></person-group>. <article-title>Biometric recognition: a systematic review on electrocardiogram data acquisition methods</article-title>. <source>Sensors (Basel)</source>. (<year>2023</year>) <volume>23</volume>:<fpage>1507</fpage>. <pub-id pub-id-type="doi">10.3390/s23031507</pub-id><pub-id pub-id-type="pmid">36772546</pub-id></mixed-citation></ref>
<ref id="B45"><label>45.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>Y</given-names></name> <name><surname>Xiao</surname> <given-names>Z</given-names></name> <name><surname>Guo</surname> <given-names>Z</given-names></name> <name><surname>Wang</surname> <given-names>Z</given-names></name></person-group>. <article-title>ECG-based personal recognition using a convolutional neural network</article-title>. <source>Pattern Recognit Lett</source>. (<year>2019</year>) <volume>125</volume>:<fpage>668</fpage>&#x2013;<lpage>76</lpage>. <pub-id pub-id-type="doi">10.1016/j.patrec.2019.07.009</pub-id></mixed-citation></ref>
<ref id="B46"><label>46.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Shusterman</surname> <given-names>V</given-names></name> <name><surname>London</surname> <given-names>B</given-names></name></person-group>. <article-title>Personalized ECG monitoring and adaptive machine learning</article-title>. <source>J Electrocardiol</source>. (<year>2024</year>) <volume>82</volume>:<fpage>131</fpage>&#x2013;<lpage>5</lpage>. <pub-id pub-id-type="doi">10.1016/j.jelectrocard.2023.12.006</pub-id><pub-id pub-id-type="pmid">38128158</pub-id></mixed-citation></ref>
<ref id="B47"><label>47.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Mangold</surname> <given-names>KE</given-names></name> <name><surname>Carter</surname> <given-names>RE</given-names></name> <name><surname>Siontis</surname> <given-names>KC</given-names></name> <name><surname>Noseworthy</surname> <given-names>PA</given-names></name> <name><surname>Lopez-Jimenez</surname> <given-names>F</given-names></name> <name><surname>Asirvatham</surname> <given-names>SJ</given-names></name></person-group>, et al. <article-title>Unlocking the potential of artificial intelligence in electrocardiogram biometrics: age-related changes, anomaly detection, and data authenticity in mobile health platforms</article-title>. <source>Eur Heart J Digit Health</source>. (<year>2024</year>) <volume>5</volume>:<fpage>314</fpage>&#x2013;<lpage>23</lpage>. <pub-id pub-id-type="doi">10.1093/ehjdh/ztae024</pub-id><pub-id pub-id-type="pmid">38774362</pub-id></mixed-citation></ref>
<ref id="B48"><label>48.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Raj</surname> <given-names>S</given-names></name> <name><surname>Ray</surname> <given-names>KC</given-names></name></person-group>. <article-title>A personalized point-of-care platform for real-time ECG monitoring</article-title>. <source>IEEE Trans Consum Electron</source>. (<year>2018</year>) <volume>64</volume>:<fpage>452</fpage>&#x2013;<lpage>60</lpage>. <pub-id pub-id-type="doi">10.1109/TCE.2018.2877481</pub-id></mixed-citation></ref>
<ref id="B49"><label>49.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Rahman</surname> <given-names>S</given-names></name> <name><surname>Faezipour</surname> <given-names>M</given-names></name></person-group>. <comment>Automatic arrhythmia detection using deep learning (2022)</comment>.</mixed-citation></ref>
<ref id="B50"><label>50.</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>G</given-names></name> <name><surname>Shanker</surname> <given-names>S</given-names></name> <name><surname>Nag</surname> <given-names>A</given-names></name> <name><surname>Lian</surname> <given-names>Y</given-names></name> <name><surname>John</surname> <given-names>D</given-names></name></person-group>. <article-title>ECG biometric authentication using self-supervised learning for IoT edge sensors</article-title>. <source>IEEE J Biomed Health Inform</source>. (<year>2024</year>) <volume>28</volume>:<fpage>6606</fpage>&#x2013;<lpage>18</lpage>. <pub-id pub-id-type="doi">10.1109/JBHI.2024.3455803</pub-id><pub-id pub-id-type="pmid">39250357</pub-id></mixed-citation></ref>
<ref id="B51"><label>51.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Bousseljot</surname> <given-names>R-D</given-names></name> <name><surname>Kreiseler</surname> <given-names>D</given-names></name> <name><surname>Schnabel</surname> <given-names>A</given-names></name></person-group>. <comment>Data from: The PTB diagnostic ECG database (2004)</comment>.</mixed-citation></ref>
<ref id="B52"><label>52.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Berki</surname> <given-names>M</given-names></name> <name><surname>Svitan</surname> <given-names>D</given-names></name> <name><surname>Micjan</surname> <given-names>M</given-names></name></person-group>. <comment>Quality-based ECG classification: deep learning approach for long-duration holter recordings. In: <italic>2024 15th International Conference on Advanced Semiconductor Devices and Microsystems (ASDAM)</italic>. IEEE (2024). p. 1&#x2013;4</comment>.</mixed-citation></ref>
<ref id="B53"><label>53.</label><mixed-citation publication-type="other"><collab>ONNX Runtime developers</collab>. <comment><italic>ONNX Runtime</italic> (2021). Available online at: <ext-link ext-link-type="uri" xlink:href="https://onnxruntime.ai/">https://onnxruntime.ai/</ext-link> (Accessed March 3, 2025)</comment>.</mixed-citation></ref>
<ref id="B54"><label>54.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Cernaj</surname> <given-names>L</given-names></name> <name><surname>Zavodnik</surname> <given-names>T</given-names></name> <name><surname>Kozarik</surname> <given-names>J</given-names></name> <name><surname>Debnar</surname> <given-names>T</given-names></name> <name><surname>Micjan</surname> <given-names>M</given-names></name> <name><surname>Donoval</surname> <given-names>M</given-names></name></person-group>, et al. <comment>Advanced ECG holter with 2.4 GHz communication. In: <italic>2022 14th International Conference on Advanced Semiconductor Devices and Microsystems (ASDAM)</italic>. IEEE (2022)</comment>.</mixed-citation></ref>
<ref id="B55"><label>55.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>O&#x2019;Shea</surname> <given-names>K</given-names></name> <name><surname>Nash</surname> <given-names>R</given-names></name></person-group>. <comment>An introduction to convolutional neural networks (2015)</comment>.</mixed-citation></ref>
<ref id="B56"><label>56.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Lai</surname> <given-names>L</given-names></name> <name><surname>Suda</surname> <given-names>N</given-names></name> <name><surname>Chandra</surname> <given-names>V</given-names></name></person-group>. <comment>CMSIS-NN: Efficient neural network kernels for Arm Cortex-M CPUs (2018)</comment>.</mixed-citation></ref>
<ref id="B57"><label>57.</label><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Howard</surname> <given-names>AG</given-names></name> <name><surname>Zhu</surname> <given-names>M</given-names></name> <name><surname>Chen</surname> <given-names>B</given-names></name> <name><surname>Kalenichenko</surname> <given-names>D</given-names></name> <name><surname>Wang</surname> <given-names>W</given-names></name> <name><surname>Weyand</surname> <given-names>T</given-names></name></person-group>, et al. <comment>MobileNets: Efficient convolutional neural networks for mobile vision applications (2017)</comment>.</mixed-citation></ref></ref-list>
<fn-group>
<fn id="n1" fn-type="custom" custom-type="edited-by"><p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/823248/overview">Toshiyo Tamura</ext-link>, Waseda University, Japan</p></fn>
<fn id="n2" fn-type="custom" custom-type="reviewed-by"><p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3192816/overview">Saeka Rahman</ext-link>, Indiana University Purdue University, United States</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3207485/overview">Sara Daas</ext-link>, University of Annaba, Algeria</p></fn>
</fn-group>
</back>
</article>