<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article article-type="research-article" dtd-version="2.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Physiol.</journal-id>
<journal-title>Frontiers in Physiology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Physiol.</abbrev-journal-title>
<issn pub-type="epub">1664-042X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1486763</article-id>
<article-id pub-id-type="doi">10.3389/fphys.2025.1486763</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Physiology</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>An emotion recognition method based on frequency-domain features of PPG</article-title>
<alt-title alt-title-type="left-running-head">Zhu et al.</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fphys.2025.1486763">10.3389/fphys.2025.1486763</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>Zhu</surname>
<given-names>Zhibin</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2825590/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Wang</surname>
<given-names>Xuanyi</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1467152/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Xu</surname>
<given-names>Yifei</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Chen</surname>
<given-names>Wanlin</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1947660/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Zheng</surname>
<given-names>Jing</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2290809/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Chen</surname>
<given-names>Shulin</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Chen</surname>
<given-names>Hang</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>College of Biomedical Engineering and Instrument Science</institution>, <institution>Zhejiang University</institution>, <addr-line>Hangzhou</addr-line>, <country>China</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Department of Psychology and Behavioral Sciences</institution>, <institution>Zhejiang University</institution>, <addr-line>Hangzhou</addr-line>, <country>China</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>Zhejiang Provincial Key Laboratory of Cardio-Cerebral Vascular Detection Technology and Medicinal Effectiveness Appraisal</institution>, <addr-line>Hangzhou</addr-line>, <country>China</country>
</aff>
<aff id="aff4">
<sup>4</sup>
<institution>Connected Healthcare Big Data Research Center</institution>, <institution>Zhejiang Lab</institution>, <addr-line>Hangzhou</addr-line>, <country>China</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>
<bold>Edited by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/532776/overview">Rajesh Kumar Tripathy</ext-link>, Birla Institute of Technology and Science, India</p>
</fn>
<fn fn-type="edited-by">
<p>
<bold>Reviewed by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/24124/overview">Bingmei M. Fu</ext-link>, City College of New York (CUNY), United States</p>
<p>
<ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1121474/overview">Anton R. Kiselev</ext-link>, National Research Center for Preventive Medicine, Russia</p>
</fn>
<corresp id="c001">&#x2a;Correspondence: Hang Chen, <email>ch-sun@263.net</email>
</corresp>
</author-notes>
<pub-date pub-type="epub">
<day>25</day>
<month>02</month>
<year>2025</year>
</pub-date>
<pub-date pub-type="collection">
<year>2025</year>
</pub-date>
<volume>16</volume>
<elocation-id>1486763</elocation-id>
<history>
<date date-type="received">
<day>29</day>
<month>08</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>29</day>
<month>01</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2025 Zhu, Wang, Xu, Chen, Zheng, Chen and Chen.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Zhu, Wang, Xu, Chen, Zheng, Chen and Chen</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<sec>
<title>Objective</title>
<p>This study aims to employ physiological model simulation to systematically analyze the frequency-domain components of PPG signals and extract their key features. The efficacy of these frequency-domain features in effectively distinguishing emotional states will also be investigated.</p>
</sec>
<sec>
<title>Methods</title>
<p>A dual windkessel model was employed to analyze PPG signal frequency components and extract distinctive features. Experimental data collection encompassed both physiological (PPG) and psychological measurements, with subsequent analysis involving distribution patterns and statistical testing (U-tests) to examine feature-emotion relationships. The study implemented support vector machine (SVM) classification to evaluate feature effectiveness, complemented by comparative analysis using pulse rate variability (PRV) features, morphological features, and the DEAP dataset.</p>
</sec>
<sec>
<title>Results</title>
<p>The results demonstrate significant differentiation in PPG frequency-domain feature responses to arousal and valence variations, achieving classification accuracies of 87.5% and 81.4%, respectively. Validation on the DEAP dataset yielded consistent patterns with accuracies of 73.5% (arousal) and 71.5% (valence). Feature fusion incorporating the proposed frequency-domain features enhanced classification performance, surpassing 90% accuracy.</p>
</sec>
<sec>
<title>Conclusion</title>
<p>This study uses physiological modeling to analyze PPG signal frequency components and extract key features. We evaluate their effectiveness in emotion recognition and reveal relationships among physiological parameters, frequency features, and emotional states.</p>
</sec>
<sec>
<title>Significance</title>
<p>These findings advance understanding of emotion recognition mechanisms and provide a foundation for future research.</p>
</sec>
</abstract>
<kwd-group>
<kwd>photoplethysmography (PPG)</kwd>
<kwd>emotion recognition</kwd>
<kwd>support vector machine (SVM)</kwd>
<kwd>PPG frequency-domain analysis</kwd>
<kwd>dual windkessel model</kwd>
</kwd-group>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Computational Physiology and Medicine</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1">
<title>1 Introduction</title>
<p>Emotions represent a complex array of psychological and physiological reactions that individuals experience in response to specific stimuli (<xref ref-type="bibr" rid="B47">Shu et al., 2018</xref>). The spectrum of emotional states can exert varying influences on an individual&#x2019;s physical and mental wellbeing, potentially precipitating severe health conditions (<xref ref-type="bibr" rid="B39">Ong et al., 2006</xref>). For instance, chronic exposure to negative emotional states has been linked to the etiology of mood disorders such as depression and anxiety (<xref ref-type="bibr" rid="B6">Button et al., 2012</xref>; <xref ref-type="bibr" rid="B18">Gou et al., 2023</xref>). Consequently, the accurate identification of emotions has emerged as a pivotal area of inquiry within the realm of psychological research.</p>
<p>The method of emotion assessment based on physiological signals stands out for its capability to collect data autonomously and discern emotional states (<xref ref-type="bibr" rid="B37">Maria et al., 2019</xref>), offering a significant advantage over traditional approaches that rely on subjective emotional scales (<xref ref-type="bibr" rid="B5">Bradley and Lang, 1994</xref>) and physical cues (<xref ref-type="bibr" rid="B3">Anthony and Patil, 2023</xref>; <xref ref-type="bibr" rid="B19">Harms et al., 2010</xref>; <xref ref-type="bibr" rid="B21">Hasan et al., 2019</xref>). Unlike these, physiological signals, which are inherently spontaneous and less prone to subjective influences, provide a more objective measure of emotional responses (<xref ref-type="bibr" rid="B24">Jang et al., 2014</xref>). The activation of emotions is inherently linked to the central nervous system&#x2019;s regulatory functions. This has prompted a multitude of studies to extract multifaceted features from electroencephalogram (EEG) signals (<xref ref-type="bibr" rid="B2">Alvarez-Jimenez et al., 2024</xref>; <xref ref-type="bibr" rid="B23">Issa et al., 2021</xref>; <xref ref-type="bibr" rid="B34">Li et al., 2022</xref>; <xref ref-type="bibr" rid="B45">Sarma and Barma, 2021</xref>), aiming to construct models for emotion identification or to investigate effective methods through the application of deep learning algorithms (<xref ref-type="bibr" rid="B9">Dhara et al., 2023</xref>; <xref ref-type="bibr" rid="B26">Joshi and Ghongade, 2021</xref>). Beyond EEG, the realm of emotion identification research has also incorporated a range of other physiological signals. 
These include electrocardiogram (ECG) (<xref ref-type="bibr" rid="B22">Hsu et al., 2020</xref>; <xref ref-type="bibr" rid="B44">Sarkar et al., 2022</xref>), which captures the heart&#x2019;s electrical activity; electromyography (EMG) (<xref ref-type="bibr" rid="B31">Kulke et al., 2020</xref>; <xref ref-type="bibr" rid="B46">Sato et al., 2008</xref>), which measures muscle electrical activity; and galvanic skin response (GSR) (<xref ref-type="bibr" rid="B17">Goshvarpour et al., 2017</xref>; <xref ref-type="bibr" rid="B51">Wen et al., 2014</xref>), which reflects the body&#x2019;s sweat gland activity in response to emotional stimuli. Each of these modalities contributes unique insights into the complex interplay between physiological responses and emotional experiences.</p>
<p>The burgeoning ubiquity of portable devices has catapulted photoplethysmography (PPG) into the spotlight of research communities, thanks to its notable benefits such as ease of acquisition, operational simplicity, and minimal equipment costs. Concurrently, the existing body of research has established that a plethora of physiological changes triggered by emotional stimuli are modulated by the autonomic nervous system (ANS) (<xref ref-type="bibr" rid="B16">Gordan et al., 2015</xref>; <xref ref-type="bibr" rid="B41">Rainville et al., 2006</xref>), impacting vital organs like the heart, blood vessels, and muscles (<xref ref-type="bibr" rid="B20">Harris and Matthews, 2004</xref>; <xref ref-type="bibr" rid="B28">Kleiger et al., 2005</xref>). These physiological shifts are vividly reflected in PPG signals, serving as a tangible indicator of the body&#x2019;s response to emotions (<xref ref-type="bibr" rid="B7">Chakraborty et al., 2020</xref>). For instance, the emotion of fear can induce vasoconstriction and tachycardia (<xref ref-type="bibr" rid="B25">Johnstone, 1971</xref>), while anger may lead to vasodilation in facial blood vessels, resulting in blushing and arrhythmia (<xref ref-type="bibr" rid="B10">Drummond, 1999</xref>). The spectrum of human emotions elicits a diverse array of effects on PPG signals (<xref ref-type="bibr" rid="B8">Davydov et al., 2011</xref>; <xref ref-type="bibr" rid="B30">Krumhansl, 1997</xref>), each offering a unique perspective on the intricate relationship between emotional states and physiological responses (<xref ref-type="bibr" rid="B38">Nummenmaa et al., 2014</xref>).</p>
<p>To date, the body of research leveraging photoplethysmography (PPG) signals for precise emotion recognition remains modest. Notable contributions include Paul&#x2019;s work (<xref ref-type="bibr" rid="B40">Paul et al., 2024</xref>), where a novel time-domain feature was extracted from the DEAP (<xref ref-type="bibr" rid="B29">Koelstra et al., 2012</xref>) dataset to discern various emotional states. <xref ref-type="bibr" rid="B4">Beckmann et al. (2019)</xref>, in another study, employed dual sensors to capture PPG&#x2019;s Perfusion Time to Peak (PTT) features, subsequently integrating them into the realm of wearable device-based emotion recognition research. <xref ref-type="bibr" rid="B35">Li et al. (2017)</xref> contributed by acquiring both PPG morphological and PRV features to differentiate between states of sadness and happiness. Furthermore, <xref ref-type="bibr" rid="B49">Wang and Yu (2021)</xref> and <xref ref-type="bibr" rid="B32">Lee et al. (2020)</xref> have ventured into the application of deep learning methodologies for the analysis of PPG signals in emotion recognition, showcasing the potential of these advanced techniques.</p>
<p>Previous studies have made significant progress in the field of emotion recognition using PPG signals, yet further exploration remains warranted. Current research predominantly focuses on PRV and morphological features, with limited exploration of frequency-domain analysis of PPG waveforms. Given that the frequency domain of signals often contains substantial valuable information, this study aims to employ physiological model simulation to systematically analyze the frequency-domain components of PPG signals and extract their key features. Furthermore, this research will investigate the efficacy of these frequency-domain features in effectively distinguishing emotional states, thereby contributing to the advancement of emotion recognition methodologies.</p>
</sec>
<sec sec-type="methods" id="s2">
<title>2 Methods</title>
<p>The schematic diagram presented in <xref ref-type="fig" rid="F1">Figure 1</xref> delineates the emotion recognition methodology predicated on frequency-domain features derived from photoplethysmography (PPG) signals. The process encompasses several pivotal steps: 1) Constructing physiological simulation models, conducting frequency domain analysis, and extracting key features; 2) Designing experiments to collect PPG signals and preprocessing them; 3) Accurate recognition of emotional states; 4) Extracting PRV and morphological features for comparison and finally 5) Verifying the recognition universality based on PPG signals collected from the DEAP dataset.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption>
<p>The schematic diagram of the emotion recognition method.</p>
</caption>
<graphic xlink:href="fphys-16-1486763-g001.tif"/>
</fig>
<sec id="s2-1">
<title>2.1 Frequency domain analysis based on simulated physiological model</title>
<p>Consequently, the development of PPG simulation models is of paramount importance for investigating the influence of different physiological factors on PPG signals. Among the available cardiovascular system models, the dual windkessel model proposed by <xref ref-type="bibr" rid="B15">Goldwyn and Watt (1967)</xref> stands out as one of the most frequently utilized frameworks. This model, along with its equivalent circuit representation, is depicted in <xref ref-type="fig" rid="F2">Figure 2</xref>, providing a visual and theoretical foundation for understanding the complex dynamics of the cardiovascular system as they relate to PPG signal generation. This study constructed a simulation design for Simulink based on this model and obtained 7 variable parameters as shown in <xref ref-type="table" rid="T1">Table 1</xref>.</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption>
<p>
<bold>(A)</bold> The dual windkessel model. <bold>(B)</bold> The equivalent circuit of the dual windkessel model.</p>
</caption>
<graphic xlink:href="fphys-16-1486763-g002.tif"/>
</fig>
<table-wrap id="T1" position="float">
<label>TABLE 1</label>
<caption>
<p>The 7 variable parameters of the model.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Parameters</th>
<th align="center">Description</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<italic>R</italic>
</td>
<td align="center">The magnitude of peripheral vascular resistance</td>
</tr>
<tr>
<td align="center">
<italic>L</italic>
</td>
<td align="center">The magnitude of blood flow inertia</td>
</tr>
<tr>
<td align="center">
<italic>C</italic>
<sub>
<italic>1</italic>
</sub>
</td>
<td align="center">Aggregate compliance of the aortic arch and its main branches</td>
</tr>
<tr>
<td align="center">
<italic>C</italic>
<sub>
<italic>2</italic>
</sub>
</td>
<td align="center">Total compliance of aorta and peripheral blood vessels</td>
</tr>
<tr>
<td align="center">
<italic>Q</italic>
<sub>
<italic>0</italic>
</sub>
</td>
<td align="center">Extreme point of blood flow</td>
</tr>
<tr>
<td align="center">
<italic>Ts</italic>
</td>
<td align="center">The duration of systole</td>
</tr>
<tr>
<td align="center">
<italic>Td</italic>
</td>
<td align="center">The duration of the heartbeat cycle</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>By systematically modulating the aforementioned variable parameters, significant alterations were observed in both the simulated PPG waveforms and their corresponding frequency spectra. As illustrated in <xref ref-type="fig" rid="F3">Figure 3</xref>, the morphological state of the PPG waveforms exhibited visually discernible variations. Furthermore, distinct differences were identified in the spectral peaks of the frequency domain representation, demonstrating the sensitivity of these spectral components to parameter variations.</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption>
<p>The PPGs under different states based on simulation. Notes: The physiological parameters were configured as follows: Left-side: R &#x3d; 0.8, C&#x2081; &#x3d; 0.8, C&#x2082; &#x3d; 0.18, L &#x3d; 0.008, T<sub>d</sub> &#x3d; 0.77, T<sub>s</sub> &#x3d; 0.28, Q&#x2080; &#x3d; 395; Right-side: R &#x3d; 1.2, C&#x2081; &#x3d; 1.2, C&#x2082; &#x3d; 0.20, L &#x3d; 0.012, T<sub>d</sub> &#x3d; 0.67, T<sub>s</sub> &#x3d; 0.25, Q&#x2080; &#x3d; 450.</p>
</caption>
<graphic xlink:href="fphys-16-1486763-g003.tif"/>
</fig>
<p>Our analysis reveals that the frequency-domain information of the photoplethysmography (PPG) signal is predominantly characterized by its fundamental frequency and two harmonic frequency bands. Consequently, we derived multiple power-related features and ratio-based features from these three frequency bands as shown in <xref ref-type="fig" rid="F4">Figure 4</xref>. The specific features and their corresponding variations in response to parameter changes during the simulation are comprehensively presented in <xref ref-type="table" rid="T2">Table 2</xref>. The computation of these features was performed through the following procedure: First, the average heart rate (HR) was obtained through preliminary data processing. Subsequently, Fast Fourier Transform (FFT) analysis was applied to the data segment. The power spectral density within the frequency bands of &#xb1;0.2&#xa0;Hz centered at the fundamental HR frequency was identified as the Basic Frequency component (BF). Similarly, the power within &#xb1;0.2&#xa0;Hz bands centered at twice and three times the HR frequency were designated as the First Harmonic Frequency (FHF) and Second Harmonic Frequency (SHF) components, respectively. The computational methodology for these ratio-based features is further detailed in <xref ref-type="table" rid="T2">Table 2</xref>.</p>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption>
<p>Frequency domain information of PPG.</p>
</caption>
<graphic xlink:href="fphys-16-1486763-g004.tif"/>
</fig>
<table-wrap id="T2" position="float">
<label>TABLE 2</label>
<caption>
<p>Description of PPG frequency domain features.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Features</th>
<th align="center">Description</th>
<th align="center">
<italic>R &#x2191;</italic>
</th>
<th align="center">
<italic>C1 &#x2191;</italic>
</th>
<th align="center">
<italic>C2 &#x2191;</italic>
</th>
<th align="center">
<italic>L &#x2191;</italic>
</th>
<th align="center">
<italic>Q0 &#x2191;</italic>
</th>
<th align="center">
<italic>Td &#x2191;</italic>
</th>
<th align="center">
<italic>Ts &#x2191;</italic>
</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">BF</td>
<td align="center">Power of the base frequency band</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
</tr>
<tr>
<td align="center">BFn</td>
<td align="center">Standardized power of the base frequency band, BF/(BF &#x2b; FHF &#x2b; SHF)</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
</tr>
<tr>
<td align="center">FHF</td>
<td align="center">Power of the first harmonic frequency band</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
</tr>
<tr>
<td align="center">FHFn</td>
<td align="center">Standardized power of the first harmonic frequency band, FHF/(BF &#x2b; FHF &#x2b; SHF)</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>0</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
</tr>
<tr>
<td align="center">SHF</td>
<td align="center">Power of the second harmonic frequency band</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>0</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
</tr>
<tr>
<td align="center">SHFn</td>
<td align="center">Standardized power of the second harmonic frequency band, SHF/(BF &#x2b; FHF &#x2b; SHF)</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>0</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
</tr>
<tr>
<td align="center">FHFBF</td>
<td align="center">FHF/BF</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>0</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
</tr>
<tr>
<td align="center">SHFBF</td>
<td align="center">SHF/BF</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>0</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
</tr>
<tr>
<td align="center">SHFFHF</td>
<td align="center">SHF/FHF</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
<td align="center">
<bold>0</bold>
</td>
<td align="center">
<bold>&#x2191;</bold>
</td>
<td align="center">
<bold>&#x2193;</bold>
</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The analysis demonstrates distinct patterns in frequency features corresponding to variations in hemodynamic parameters: (1) Increased peripheral vascular resistance leads to attenuation of the fundamental frequency while enhancing harmonic frequencies. (2) Elevated vascular compliance results in amplification of the first harmonic frequency, accompanied by attenuation of both the fundamental frequency and the second harmonic frequency. (3) Augmented blood flow inertia induces enhancement of both the fundamental frequency and the first harmonic frequency, with the latter exhibiting more pronounced amplification, while simultaneously causing attenuation of the second harmonic frequency. Furthermore, other physiological parameters also exert significant influences on these features.</p>
</sec>
<sec id="s2-2">
<title>2.2 Dataset and preprocessing</title>
<p>This dataset is anchored in <xref ref-type="bibr" rid="B11">Ekman and Friesen&#x2019;s (1971)</xref> theory of discrete emotions, which posits that emotions are distinct, universally identified mental states. In alignment with this theoretical framework and to ensure the selection of authentic and impactful emotion-inducing materials, our research team referred to authoritative emotion databases. Notably, we took cues from the DECAF database (<xref ref-type="bibr" rid="B1">Abadi et al., 2015</xref>) in curating a selection of video materials designed to elicit a variety of emotional responses. Our team has conducted extensive prior research to thoroughly assess and validate the efficacy of these selected materials in inducing the intended emotions during emotion induction experiments (<xref ref-type="bibr" rid="B50">Wang et al., 2022</xref>). The specific materials chosen for this study are detailed in <xref ref-type="table" rid="T3">Table 3</xref>, where each entry corresponds to a particular emotional state aimed to be induced.</p>
<table-wrap id="T3" position="float">
<label>TABLE 3</label>
<caption>
<p>Emotion inducing materials selected.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Source Movie</th>
<th align="center">Duration(s)</th>
<th align="center">Arousal</th>
<th align="center">Valence</th>
<th align="center">Emotion</th>
<th align="center">Scene description</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">&#x2014;</td>
<td align="center">192</td>
<td align="center">3.96 &#xb1; 1.72</td>
<td align="center">5.21 &#xb1; 1.34</td>
<td align="center">Calmness</td>
<td align="center">Daily life of a family</td>
</tr>
<tr>
<td align="center">Up</td>
<td align="center">67</td>
<td align="center">6.51 &#xb1; 1.75</td>
<td align="center">6.87 &#xb1; 1.39</td>
<td align="center">Positive</td>
<td align="center">Carl&#x2014;a shy, quiet boy&#x2014;meets the energetic Ellie</td>
</tr>
<tr>
<td align="center">The Truman Show</td>
<td align="center">60</td>
<td align="center">5.8 &#xb1; 1.88</td>
<td align="center">6.29 &#xb1; 1.43</td>
<td align="center">Positive</td>
<td align="center">Truman and his lover go to the beach for a romantic evening</td>
</tr>
<tr>
<td align="center">Wall-E</td>
<td align="center">93</td>
<td align="center">6.01 &#xb1; 1.86</td>
<td align="center">7.26 &#xb1; 1.36</td>
<td align="center">Positive</td>
<td align="center">Wall-E and Eve spend a romantic night together</td>
</tr>
<tr>
<td align="center">Gandhi</td>
<td align="center">123</td>
<td align="center">6.15 &#xb1; 1.91</td>
<td align="center">3.64 &#xb1; 1.23</td>
<td align="center">Negative</td>
<td align="center">Indian attorney gets thrown out of a first-class train compartment</td>
</tr>
<tr>
<td align="center">My Bodyguard</td>
<td align="center">101</td>
<td align="center">5.32 &#xb1; 2.12</td>
<td align="center">3.48 &#xb1; 1.31</td>
<td align="center">Negative</td>
<td align="center">Group of thugs provoke a teenager</td>
</tr>
<tr>
<td align="center">The Shining</td>
<td align="center">78</td>
<td align="center">7.41 &#xb1; 1.62</td>
<td align="center">2.93 &#xb1; 1.46</td>
<td align="center">Negative</td>
<td align="center">A child enters hotel room searching for his mom</td>
</tr>
<tr>
<td align="center">Black Swan</td>
<td align="center">62</td>
<td align="center">8.22 &#xb1; 1.14</td>
<td align="center">2.38 &#xb1; 1.81</td>
<td align="center">Negative</td>
<td align="center">A woman notices paranormal activity around her</td>
</tr>
<tr>
<td align="center">My Girl</td>
<td align="center">66</td>
<td align="center">6.39 &#xb1; 1.58</td>
<td align="center">3.07 &#xb1; 1.4</td>
<td align="center">Negative</td>
<td align="center">A young girl cries at her friend&#x2019;s funeral</td>
</tr>
<tr>
<td align="center">Bambi</td>
<td align="center">166</td>
<td align="center">6.19 &#xb1; 1.88</td>
<td align="center">3.48 &#xb1; 1.36</td>
<td align="center">Negative</td>
<td align="center">The fawn Bambi&#x2019;s mother is killed by a deer hunter</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The experimental protocol for this dataset was granted approval by the Medical Ethics Committee of the Department of Psychology and Behavioral Sciences at Zhejiang University, as evidenced by the ethical review document (Zhejiang University Psychological Ethics Review [2022] No. 059). A total of 192 students from Zhejiang University were initially recruited to partake in the study. Eligibility criteria for participants included having normal vision, hearing, and perception abilities, as well as being free from any physical or psychological conditions that could potentially influence emotional responses. Throughout the experimental process, stringent quality control measures were implemented. Regrettably, the dataset was compromised due to several factors: (1) instrumental operational errors resulted in the unsuccessful acquisition of 12 cases; (2) 4 cases were excluded due to participants&#x2019; personal reasons; and (3) preliminary quality assessment led to the elimination of 19 cases owing to suboptimal physiological signal acquisition. This data attrition, while regrettable, was necessary to maintain the integrity and reliability of the study. Consequently, the dataset was refined to include data and relevant information from 157 participants who met the criteria, comprising 96 females and 61 males.</p>
<p>The comprehensive experimental protocol for each participant was conducted in a dimly lit room, ensuring minimal interference from ambient light sources, with the exception of the computer display screen, as depicted in <xref ref-type="fig" rid="F5">Figure 5</xref>. Prior to the commencement of the experiment, participants were provided with a comprehensive briefing on the experimental procedures and necessary precautions. They were required to sign an informed consent form, signifying their voluntary agreement to participate in the study. Subsequently, each participant was outfitted with a photoelectric sensor on their left index finger for the acquisition of PPG signals, as well as electrodes to capture additional physiological signals. The PPG signals were recorded using a physiological signal monitor (ePM-12M, Mindray, China), which operated at a sampling rate of 125&#xa0;Hz. The video stimuli were presented on a computer screen positioned at a distance ranging from 0.5 to 1&#xa0;m from the participant, allowing them to view the content from their most comfortable seated position. Throughout the experiment, each participant was exposed to a total of 8 distinct video materials designed to elicit various emotional responses. Upon completion of each video segment, participants were prompted to complete the Self-Assessment Manikin (SAM) Emotion Scale, followed by a brief respite. The sequence of video presentation and scale assessment was orchestrated by a specially designed experimental program, enabling participants to independently execute all steps of the experimental process until its conclusion.</p>
<fig id="F5" position="float">
<label>FIGURE 5</label>
<caption>
<p>Experimental procedures.</p>
</caption>
<graphic xlink:href="fphys-16-1486763-g005.tif"/>
</fig>
<p>PPG signals are among the most accessible physiological signals for collection; however, they are susceptible to various sources of noise and interference that can complicate the acquisition process. Despite the implementation of hardware-level denoising techniques, the integrity of PPG signals can still be compromised by factors such as respiration, bodily movement, and issues related to data transmission. To ensure the acquisition of high-fidelity PPG signals, it is imperative to employ additional processing measures. In the context of this study, all PPG recordings underwent a series of processing techniques aimed at enhancing signal quality.</p>
<p>Firstly, the 1&#x2013;20&#xa0;Hz bandpass filter was used to remove most of the noise and interference caused by breathing, body movements, and other factors, thereby focusing the signal on high-density information regions. Due to the presence of high-frequency disturbances, each PPG record <bold>
<italic>y</italic>
</bold> was smoothed by <xref ref-type="disp-formula" rid="e1">Formula 1</xref> and the result was denoted as <bold>
<italic>y</italic>
</bold>
<sub>
<bold>
<italic>1</italic>
</bold>
</sub>, where <bold>
<italic>x</italic>
</bold>
<sub>
<bold>
<italic>i</italic>
</bold>
</sub> represented the sampling time of each point, <bold>
<italic>i</italic>
</bold> represented the sequence number of the data points, 3 &#x2264; <bold>
<italic>i</italic>
</bold> &#x2264; <bold>
<italic>n</italic>
</bold>-1, and <bold>
<italic>n</italic>
</bold> was the number of data points in the record.<disp-formula id="e1">
<mml:math id="m1">
<mml:mrow>
<mml:mfenced open="{" close="" separators="&#x7c;">
<mml:mrow>
<mml:mtable columnalign="left">
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mn>2</mml:mn>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2b;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mn>2</mml:mn>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2b;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mn>3</mml:mn>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
<mml:mn>3</mml:mn>
</mml:mfrac>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>2</mml:mn>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2b;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2b;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2b;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>&#x2b;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2b;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>&#x2b;</mml:mo>
<mml:mn>2</mml:mn>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
<mml:mn>5</mml:mn>
</mml:mfrac>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>2</mml:mn>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2b;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2b;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
<mml:mn>3</mml:mn>
</mml:mfrac>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:math>
<label>(1)</label>
</disp-formula>
</p>
<p>Then, the method named &#x201c;moving-pane&#x201d; was used to identify fiducial points of each PPG record. This method created a pane with a specific width and moved it along the timeline of the PPG record. All maximum points in the pane and minimum points between every two maximum points were recorded during the movement and the maximum points that do not meet the following rules were excluded, as shown in <xref ref-type="fig" rid="F6">Figure 6A</xref>: 1) The distance between this point <bold>
<italic>P</italic>
</bold>
<sub>
<bold>
<italic>i</italic>
</bold>
</sub> and <bold>
<italic>P</italic>
</bold>
<sub>
<bold>
<italic>i-1</italic>
</bold>
</sub> less than 0.6 s; 2) The amplitude difference between this point <bold>
<italic>P</italic>
</bold>
<sub>
<bold>
<italic>i</italic>
</bold>
</sub> and <bold>
<italic>T</italic>
</bold>
<sub>
<bold>
<italic>i-1</italic>
</bold>
</sub> less than 0.5 times the amplitude difference between <bold>
<italic>P</italic>
</bold>
<sub>
<bold>
<italic>i-1</italic>
</bold>
</sub> and <bold>
<italic>T</italic>
</bold>
<sub>
<bold>
<italic>i-2</italic>
</bold>
</sub>, or the amplitude difference between <bold>
<italic>P</italic>
</bold>
<sub>
<bold>
<italic>i</italic>
</bold>
</sub> and <bold>
<italic>T</italic>
</bold>
<sub>
<bold>
<italic>i-1</italic>
</bold>
</sub> less than 0.5 times the amplitude difference between <bold>
<italic>P</italic>
</bold>
<sub>
<bold>
<italic>i&#x2b;1</italic>
</bold>
</sub> and <bold>
<italic>T</italic>
</bold>
<sub>
<bold>
<italic>i.</italic>
</bold>
</sub>
</p>
<fig id="F6" position="float">
<label>FIGURE 6</label>
<caption>
<p>
<bold>(A)</bold> The method &#x201c;moving-pane.&#x201d; <bold>(B)</bold> The method removing baseline drift.</p>
</caption>
<graphic xlink:href="fphys-16-1486763-g006.tif"/>
</fig>
<p>Following the application of the &#x201c;moving-pane&#x201d; method, a meticulous manual review was performed to identify and retain the extremum points within the PPG record, which correspond to the peaks and troughs that serve as the fiducial markers of the waveform. This process is crucial for the accurate characterization of the PPG signal&#x2019;s morphology. Subsequently, to address the issue of baseline drift that can distort the PPG signal, the method illustrated in <xref ref-type="fig" rid="F6">Figure 6B</xref> and encapsulated by <xref ref-type="disp-formula" rid="e2">Formula 2</xref> was employed. This technique effectively removes the baseline wander, ensuring that the origin of all individual PPG waveforms is recalibrated to zero. This normalization is essential for the consistent analysis and comparison of PPG signals across different recordings.<disp-formula id="e2">
<mml:math id="m2">
<mml:mrow>
<mml:mfenced open="{" close="" separators="&#x7c;">
<mml:mrow>
<mml:mtable columnalign="left">
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:mi>k</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>U</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:msup>
<mml:mi>U</mml:mi>
<mml:mo>&#x2032;</mml:mo>
</mml:msup>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>U</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:msup>
<mml:mi>U</mml:mi>
<mml:mo>&#x2032;</mml:mo>
</mml:msup>
</mml:msub>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mrow>
<mml:msup>
<mml:mi>y</mml:mi>
<mml:mo>&#x2032;</mml:mo>
</mml:msup>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>k</mml:mi>
<mml:mo>&#x2a;</mml:mo>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>U</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:math>
<label>(2)</label>
</disp-formula>
</p>
<p>In the concluding phase of signal processing, the z-score normalization technique was applied to eliminate the variability in the scale of each PPG record. This standardization procedure ensures that all records are brought onto a common scale, facilitating a unified analysis. The aforementioned processing methodologies were executed using a combination of self-devised algorithms and the scipy package (<xref ref-type="bibr" rid="B48">Virtanen et al., 2020</xref>), which is a fundamental component of the Python ecosystem for scientific computing. Post-processing, each PPG record was meticulously segmented in accordance with the distinct emotional stimuli that served as the triggers. Specifically, the records were divided into segments every 20&#xa0;s, with each segment annotated to reflect the predominant emotional state during that interval.</p>
</sec>
<sec id="s2-3">
<title>2.3 Features extraction and emotion analysis</title>
<p>Following the preprocessing of PPG signals, the data were segmented according to different emotional stimuli protocols. The continuous recordings were subsequently divided into 20-s epochs, with each epoch being annotated with corresponding emotional labels. This segmentation procedure yielded a total of 2,474 epochs for low arousal states, 5,560 epochs for high arousal states, 3,229 epochs for low valence states, and 4,805 epochs for high valence states, thereby establishing a comprehensive dataset for emotional state classification.</p>
<p>For each 20-s epoch, nine frequency-domain features (as specified in <xref ref-type="table" rid="T3">Table 3</xref>) were extracted. To account for inter-individual variability and other potential confounding factors, feature normalization was performed using a z-score-like transformation according to <xref ref-type="disp-formula" rid="e3">Equation 3</xref>. This standardization procedure ensures comparability across different subjects while preserving the relative distribution characteristics of the extracted features.<disp-formula id="e3">
<mml:math id="m3">
<mml:mrow>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mrow>
<mml:mi>r</mml:mi>
<mml:mi>e</mml:mi>
<mml:mi>m</mml:mi>
<mml:mi>o</mml:mi>
<mml:mi>v</mml:mi>
<mml:mi>e</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mrow>
<mml:mi>e</mml:mi>
<mml:mi>m</mml:mi>
<mml:mi>o</mml:mi>
<mml:mi>t</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>o</mml:mi>
<mml:mi>n</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mrow>
<mml:mi>m</mml:mi>
<mml:mi>e</mml:mi>
<mml:mi>a</mml:mi>
<mml:mi>n</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>/</mml:mo>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>t</mml:mi>
<mml:mi>d</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
<label>(3)</label>
</disp-formula>
</p>
<p>Among them, for specific feature <bold>
<italic>F</italic>
</bold> and a certain subject <bold>
<italic>S</italic>
</bold>, <bold>
<italic>F</italic>
</bold>
<sub>
<bold>
<italic>mean</italic>
</bold>
</sub> and <bold>
<italic>F</italic>
</bold>
<sub>
<bold>
<italic>std</italic>
</bold>
</sub> were the average and standard deviation of feature <bold>
<italic>F</italic>
</bold> for all sections of subject <bold>
<italic>S</italic>
</bold>, <bold>
<italic>F</italic>
</bold>
<sub>
<bold>
<italic>emotion</italic>
</bold>
</sub> was the value of feature <bold>
<italic>F</italic>
</bold> before processing for a specific emotional section of subject <bold>
<italic>S</italic>
</bold>, and <bold>
<italic>F</italic>
</bold>
<sub>
<bold>
<italic>remove</italic>
</bold>
</sub> was the processed feature value.</p>
<p>The Mann Whitney U-test (<xref ref-type="bibr" rid="B43">Rosner and Grove, 1999</xref>), a non-parametric statistical method, serves as a robust tool for assessing significant differences between two independent datasets, especially when the data does not meet the assumptions of parametric tests. In this study, the U-test, facilitated by Python&#x2019;s SciPy library (<xref ref-type="bibr" rid="B48">Virtanen et al., 2020</xref>), was employed to scrutinize the variability in the newly extracted PPG frequency-domain features across different emotional dimensions, specifically comparing the high and low arousal states, as well as the high and low valence states. The preliminary evaluation of the emotion-discriminative capability of the extracted PPG frequency-domain features was conducted through two complementary approaches: (1) statistical analysis using p-values derived from the two-tailed U-test, and (2) comparative examination of feature distribution patterns across different emotional states. This dual-method assessment framework provides robust evidence for evaluating the effectiveness of the proposed features in emotion differentiation.</p>
<p>Subsequently, based on the preliminary analysis, the identified emotion-discriminative features were utilized to construct a machine learning model for emotion recognition using Support Vector Machines (SVM). In this study, the SVM was implemented using the Scikit-learn algorithm package in Python (<xref ref-type="bibr" rid="B12">Fabian et al., 2011</xref>). To effectively partition the dataset, the <italic>train_test_split</italic> function from the Scikit-learn package was utilized, segregating the feature set into training and testing subsets with a ratio of 7:3. Given the modest size of the dataset, traditional cross-validation could potentially result in overfitting. Consequently, the study opted for a leave-one-point-out method for model training, a technique that iteratively excludes a single data point from the training process. This approach was iterated 100 times, ensuring a comprehensive assessment of the model&#x2019;s performance (<xref ref-type="bibr" rid="B36">Ma et al., 2023</xref>). The Area Under the Curve (AUC) of the Receiver Operating Characteristic (ROC) curve was computed for each iteration. The median AUC value, derived from the 100 models, was adopted as the representative performance metric for the SVM model.</p>
<p>ROC curves (<xref ref-type="bibr" rid="B42">Rodellar-Biarge et al., 2015</xref>) were employed to evaluate and visualize the discriminatory power of the features in identifying distinct emotional states. The model&#x2019;s predictive accuracy, the AUCs for the ROC curves, and the precision metric were computed to quantitatively assess the model&#x2019;s efficacy in recognizing emotions.</p>
</sec>
<sec id="s2-4">
<title>2.4 Feature comparison and cross dataset validation</title>
<p>To further validate the effectiveness of the extracted PPG frequency-domain features while maintaining the exclusive use of PPG as the sole physiological signal, we additionally extracted two well-established feature sets that have been extensively validated by numerous researchers for emotion recognition: pulse rate variability (PRV) features and PPG morphological features, as detailed in <xref ref-type="table" rid="T4">Table 4</xref>. Following the same analytical protocol applied to the frequency-domain features, these comparative features underwent preliminary screening before being utilized to construct SVM-based machine learning models, thereby obtaining their respective emotion recognition accuracy metrics for systematic comparison. Furthermore, to investigate the underlying relationships among different feature sets, we conducted correlation analysis between the PPG frequency-domain features and the two additional feature groups. This analysis facilitated the development of an integrated feature set through optimal feature fusion, potentially enhancing the overall emotion recognition performance.</p>
<table-wrap id="T4" position="float">
<label>TABLE 4</label>
<caption>
<p>Features for comparison.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Category</th>
<th align="center">Features</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">PRV</td>
<td align="center">MeanNN, SDNN, RMSNN, MedianNN, RDNN, IQRNN, CVNN, SDSD, RMSSD, CVSD, pNN20, ApEn, FuzzyEn, LZC</td>
</tr>
<tr>
<td align="center">Morphological Features</td>
<td align="center">Amp_Diff, Interval_Rise, Interval_Drop, Slope_Rise, Slope_Max_Rise, Slope_Drop, Slope_Min_Drop, Area_Rise, Area_Drop, Area_Total, Area_Rise_rate, Area_Drop_rate, Area_Total_rate, Area_RD_rate</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>To ascertain the efficacy and generalizability of the methodologies and features delineated in this research, an expanded dataset of PPG recordings was sourced from the DEAP database, which is publicly accessible. The DEAP dataset (<xref ref-type="bibr" rid="B29">Koelstra et al., 2012</xref>) serves as a multimodal repository for affective analysis, encompassing physiological signal recordings from 32 participants exposed to 40 distinct video stimuli. For the purpose of this analysis, PPG recordings were exclusively selected. Consistent with the methodology applied to the aforementioned dataset, participants were prompted to rate the arousal, valence, and additional pertinent attributes of each video stimulus. The models established in the preceding section were then applied to discern the emotional states associated with the DEAP dataset entries. Subsequently, the derived accuracy metrics were utilized to assess the models&#x2019; performance in emotion recognition and their adaptability across different datasets.</p>
</sec>
</sec>
<sec sec-type="results" id="s3">
<title>3 Results</title>
<p>
<xref ref-type="fig" rid="F7">Figure 7</xref> presents the distribution patterns of the nine extracted PPG frequency-domain features across different emotional states, categorized by high/low arousal and high/low valence. The corresponding p-values derived from U-tests, which indicate the statistical significance of differences between high and low arousal states as well as between high and low valence states, are also displayed. The analysis reveals distinct patterns of feature variations in response to different emotional dimensions.</p>
<fig id="F7" position="float">
<label>FIGURE 7</label>
<caption>
<p>The distribution of Peaks-based frequency-domain features under different emotional states. Note: &#x2a;represents p &#x3c; 0.05, &#x2a;&#x2a; represents p &#x3c; 0.01, &#x2a;&#x2a;&#x2a; represents p &#x3c; 0.001.</p>
</caption>
<graphic xlink:href="fphys-16-1486763-g007.tif"/>
</fig>
<p>Regarding arousal levels, we observe synchronous increases in both the fundamental frequency and the first harmonic frequency, while the ratio-based features remain relatively stable. Notably, features associated with the second harmonic frequency demonstrate significant enhancement, indirectly reflecting the overall increase in total power during heightened arousal states. This observed phenomenon can be primarily attributed to the physiological correlates of increased peripheral vascular resistance and enhanced blood flow intensity. These physiological changes are consistent with the characteristic manifestations of heightened arousal states, which typically involve muscle tension, vasoconstriction, and intensified cardiac activity resulting from emotional excitation (<xref ref-type="bibr" rid="B47">Shu et al., 2018</xref>).</p>
<p>In contrast, valence levels exhibit a different pattern of influence: while both the fundamental frequency and the first harmonic frequency show moderate increases (with the first harmonic demonstrating more pronounced enhancement), the second harmonic frequency displays a marked decrease. Interestingly, the total power remains relatively unaffected by changes in valence. This observation is strongly associated with the fundamental nature of valence as a psychophysiological dimension that primarily reflects the distinction between positive and negative affective states (<xref ref-type="bibr" rid="B14">Gendolla and Krusken, 2001</xref>; <xref ref-type="bibr" rid="B13">Fairclough et al., 2014</xref>).</p>
<p>The analysis clearly demonstrates that the PPG frequency-domain features exhibit significant sensitivity to both arousal and valence variations, as evidenced by their systematic changes corresponding to different emotional states. To further evaluate the effectiveness of these features in emotion recognition, we conducted a comparative analysis with two well-established feature sets: PRV features and PPG morphological features. <xref ref-type="fig" rid="F8">Figure 8</xref> presents the results of intra-group and inter-group correlation analyses among these three feature sets.</p>
<fig id="F8" position="float">
<label>FIGURE 8</label>
<caption>
<p>Results of intra-group and inter-group correlation analyses.</p>
</caption>
<graphic xlink:href="fphys-16-1486763-g008.tif"/>
</fig>
<p>The correlation matrix reveals several important patterns: (1) The PPG frequency-domain features maintain relatively low intra-group correlations, suggesting their complementary nature in capturing different aspects of emotional states. (2) Moderate correlations between frequency-domain features and morphological features indicate that the spectral information partially reflects certain morphological characteristics of PPG signals. (3) Both PRV and morphological feature sets exhibit substantially higher intra-group correlations compared to the frequency-domain features, indicating greater redundancy within these conventional feature sets. This comparative analysis suggests that the proposed frequency-domain features offer a more diverse and potentially more efficient representation of emotional states.</p>
<p>The comparative results of feature performance are presented in <xref ref-type="table" rid="T5">Table 5</xref> and <xref ref-type="fig" rid="F9">Figure 9</xref>. Among the three feature sets, the proposed PPG frequency-domain features demonstrated superior performance in machine learning models, achieving an accuracy of 87.5% in arousal classification and 81.4% in valence classification. The higher accuracy in arousal classification aligns with the more pronounced feature distribution differences observed in arousal states, as previously discussed. The PPG morphological features also showed reasonable discriminative capability, while the PRV features exhibited relatively poor performance. The suboptimal performance of PRV features may be attributed to two potential factors: (1) The exclusion of traditionally effective features such as LF and HF components, which could not be accurately computed due to the short duration (20s) of individual epochs; (2) The high intra-feature correlation within the PRV feature set, resulting in substantial redundancy despite the large number of features, effectively reducing the dimensionality of useful information.</p>
<table-wrap id="T5" position="float">
<label>TABLE 5</label>
<caption>
<p>The accuracy information of models.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Features</th>
<th align="center">Dataset</th>
<th align="center">Emotion</th>
<th align="center">Accuracy</th>
<th align="center">AUC</th>
<th align="center">Precision</th>
<th align="center">Sensitivity</th>
<th align="center">Specificity</th>
<th align="center">F1-score</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">Combination</td>
<td rowspan="8" align="center">Mine</td>
<td rowspan="4" align="center">LA-HA</td>
<td align="center">91.0%</td>
<td align="center">0.961</td>
<td align="center">0.972</td>
<td align="center">0.898</td>
<td align="center">0.939</td>
<td align="center">0.933</td>
</tr>
<tr>
<td align="center">Frequency-Domain</td>
<td align="center">87.5%</td>
<td align="center">0.940</td>
<td align="center">0.980</td>
<td align="center">0.831</td>
<td align="center">0.965</td>
<td align="center">0.899</td>
</tr>
<tr>
<td align="center">Morphology</td>
<td align="center">84.4%</td>
<td align="center">0.929</td>
<td align="center">0.978</td>
<td align="center">0.784</td>
<td align="center">0.965</td>
<td align="center">0.871</td>
</tr>
<tr>
<td align="center">PRV</td>
<td align="center">75.7%</td>
<td align="center">0.827</td>
<td align="center">0.873</td>
<td align="center">0.747</td>
<td align="center">0.778</td>
<td align="center">0.805</td>
</tr>
<tr>
<td align="center">Combination</td>
<td rowspan="4" align="center">LV-HV</td>
<td align="center">85.9%</td>
<td align="center">0.916</td>
<td align="center">0.881</td>
<td align="center">0.893</td>
<td align="center">0.804</td>
<td align="center">0.887</td>
</tr>
<tr>
<td align="center">Frequency-Domain</td>
<td align="center">81.4%</td>
<td align="center">0.852</td>
<td align="center">0.899</td>
<td align="center">0.814</td>
<td align="center">0.814</td>
<td align="center">0.854</td>
</tr>
<tr>
<td align="center">Morphology</td>
<td align="center">75.9%</td>
<td align="center">0.824</td>
<td align="center">0.895</td>
<td align="center">0.743</td>
<td align="center">0.797</td>
<td align="center">0.812</td>
</tr>
<tr>
<td align="center">PRV</td>
<td align="center">71.1%</td>
<td align="center">0.747</td>
<td align="center">0.757</td>
<td align="center">0.799</td>
<td align="center">0.560</td>
<td align="center">0.778</td>
</tr>
<tr>
<td align="center">Combination</td>
<td rowspan="8" align="center">DEAP</td>
<td rowspan="4" align="center">LA-HA</td>
<td align="center">79.3%</td>
<td align="center">0.784</td>
<td align="center">0.771</td>
<td align="center">0.915</td>
<td align="center">0.628</td>
<td align="center">0.836</td>
</tr>
<tr>
<td align="center">Frequency-Domain</td>
<td align="center">73.5%</td>
<td align="center">0.741</td>
<td align="center">0.709</td>
<td align="center">0.881</td>
<td align="center">0.555</td>
<td align="center">0.786</td>
</tr>
<tr>
<td align="center">Morphology</td>
<td align="center">72.6%</td>
<td align="center">0.739</td>
<td align="center">0.698</td>
<td align="center">0.880</td>
<td align="center">0.540</td>
<td align="center">0.778</td>
</tr>
<tr>
<td align="center">PRV</td>
<td align="center">70.6%</td>
<td align="center">0.714</td>
<td align="center">0.682</td>
<td align="center">0.855</td>
<td align="center">0.530</td>
<td align="center">0.759</td>
</tr>
<tr>
<td align="center">Combination</td>
<td rowspan="4" align="center">LV-HV</td>
<td align="center">75.9%</td>
<td align="center">0.762</td>
<td align="center">0.738</td>
<td align="center">0.882</td>
<td align="center">0.602</td>
<td align="center">0.804</td>
</tr>
<tr>
<td align="center">Frequency-Domain</td>
<td align="center">70.9%</td>
<td align="center">0.707</td>
<td align="center">0.694</td>
<td align="center">0.848</td>
<td align="center">0.539</td>
<td align="center">0.763</td>
</tr>
<tr>
<td align="center">Morphology</td>
<td align="center">71.5%</td>
<td align="center">0.718</td>
<td align="center">0.686</td>
<td align="center">0.872</td>
<td align="center">0.530</td>
<td align="center">0.768</td>
</tr>
<tr>
<td align="center">PRV</td>
<td align="center">69.5%</td>
<td align="center">0.699</td>
<td align="center">0.676</td>
<td align="center">0.842</td>
<td align="center">0.520</td>
<td align="center">0.750</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="F9" position="float">
<label>FIGURE 9</label>
<caption>
<p>ROC curves for distinguishing <bold>(A)</bold> arousal levels; <bold>(B)</bold> valence levels.</p>
</caption>
<graphic xlink:href="fphys-16-1486763-g009.tif"/>
</fig>
<p>To further validate the generalizability of our methodology, we replicated the analytical procedure on PPG signals from the DEAP dataset, maintaining identical processing pipelines and model construction approaches. The comparative results, as presented in <xref ref-type="table" rid="T5">Table 5</xref>, demonstrate remarkable consistency with our proprietary dataset findings, though with marginally reduced classification accuracy in the DEAP dataset. This performance variation can be attributed to multiple factors, including the use of default SVM parameters without specific optimization and inherent differences in emotional elicitation protocols between studies.</p>
<p>Capitalizing on the observed low inter-group feature correlations, we implemented a comprehensive feature fusion strategy. This integrated approach yielded exceptional emotion recognition performance, achieving accuracy rates surpassing 90% on our proprietary dataset. These findings not only confirm the standalone efficacy of the proposed PPG frequency-domain features in emotion recognition but also establish their crucial role as a fundamental component in advanced emotion recognition systems. The features&#x2019; unique complementary characteristics make them an indispensable element in the pursuit of enhanced recognition performance, serving as a critical piece in the development of more sophisticated emotion classification frameworks.</p>
</sec>
<sec sec-type="discussion" id="s4">
<title>4 Discussion</title>
<p>This study employs a physiological model-based simulation approach to systematically analyze the frequency-domain components of PPG signals and extract their essential characteristics. Through this comprehensive investigation, we examine the efficacy of these frequency-domain features in effectively discriminating emotional states. Furthermore, the research elucidates the intricate relationships between physiological parameters and emotional states, as well as the connections between PPG frequency-domain features and emotional responses, thereby providing a deeper understanding of the psychophysiological mechanisms underlying emotion recognition.</p>
<p>Through comprehensive investigation of PPG frequency-domain features, we have identified significant correlations between these features and various physiological parameters, including peripheral vascular resistance, blood flow inertia, and vascular compliance. Moreover, these features demonstrate remarkable sensitivity to variations in both arousal and valence levels, thereby establishing a crucial tripartite relationship among physiological parameters, PPG frequency-domain characteristics, and emotional states. These findings provide valuable psychophysiological foundations and references for subsequent emotion recognition analyses based on PPG frequency-domain features.</p>
<p>An intriguing observation from both our proprietary dataset and the DEAP dataset reveals that PPG frequency-domain features exhibit greater sensitivity to arousal levels compared to valence. This phenomenon may be attributed to the more pronounced cardiovascular changes associated with emotional intensity (arousal) rather than emotional polarity (positive/negative valence), suggesting that the autonomic nervous system&#x2019;s response to emotional arousal might be more substantial and detectable through PPG analysis.</p>
<p>In comparison with existing studies, as summarized in <xref ref-type="table" rid="T6">Table 6</xref>, research utilizing PPG signals for emotion recognition remains relatively scarce, with even fewer studies employing PPG as the primary or exclusive physiological modality. When considering variations in emotional elicitation materials and differing analytical focuses across studies, our research demonstrates competitive emotion recognition accuracy, positioning itself within the upper-middle range of existing literature. This represents a significant achievement in the field. Notably, findings from other researchers corroborate our observations regarding the suboptimal performance of PRV features and the relatively better performance of morphological features. However, what distinguishes our study is the establishment of a comprehensive theoretical framework that bridges physiological parameters, PPG frequency-domain features, and emotional states. This tripartite model provides robust psychophysiological evidence supporting the use of PPG frequency-domain features for emotion recognition, thereby advancing our understanding of the underlying mechanisms and offering a solid theoretical foundation for future research in this domain.</p>
<table-wrap id="T6" position="float">
<label>TABLE 6</label>
<caption>
<p>Comparisons with other studies.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">References</th>
<th align="center">Signals</th>
<th align="center">Source</th>
<th align="center">Features (PPG) or method</th>
<th align="center">Accuracy or result</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<xref ref-type="bibr" rid="B53">Lee et al. (2019)</xref>
</td>
<td align="center">PPG</td>
<td align="center">DEAP</td>
<td align="center">Features extracted through CNN</td>
<td align="center">Valence: 75.3%<break/>Arousal: 76.2%</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B54">Choi and Kim (2018)</xref>
</td>
<td align="center">EEG, PPG, Video</td>
<td align="center">DEAP</td>
<td align="center">LSTM</td>
<td align="center">Valence: 78%<break/>Arousal: 74.65%</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B40">Paul et al. (2024)</xref>
</td>
<td align="center">PPG</td>
<td align="center">DEAP</td>
<td align="center">A New Morphological Feature</td>
<td align="center">Special emotions: 97.78%</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B4">Beckmann et al. (2019)</xref>
</td>
<td align="center">PPG</td>
<td align="center">Experiment</td>
<td align="center">Pulse Transit Time (PTT)</td>
<td align="center">Significant Effectiveness</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B35">Li et al. (2017)</xref>
</td>
<td align="center">PPG</td>
<td align="center">Experiment</td>
<td align="center">PRV, <break/>Morphological Features</td>
<td align="center">1) Morphological features superior to PRV features<break/>2) Frequency domain features (LF, etc.) superior to time domain features (SDNN, etc.) in PRV</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B49">Wang and Yu (2021)</xref>
</td>
<td align="center">PPG</td>
<td align="center">Experiment</td>
<td align="center">Deep Learning</td>
<td align="center">Two classes: 89.15%<break/>Four classes: 84.70%</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B27">Kang and Kim (2022)</xref>
</td>
<td align="center">PPG, GSR</td>
<td align="center">DEAP, MERTI-Apps</td>
<td align="center">Deep Learning</td>
<td align="center">Valence: 73.49%<break/>Arousal: 77.87%</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B52">Yang et al. (2024)</xref>
</td>
<td align="center">PPG, GSR, EEG, Video</td>
<td align="center">Experiment</td>
<td align="center">PRV, <break/>Morphological Features</td>
<td align="center">Positive-Negative: 80.96%</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B32">Lee et al. (2020)</xref>
</td>
<td align="center">PPG</td>
<td align="center">DEAP</td>
<td align="center">PRV, Features extracted through CNN</td>
<td align="center">Valence: 82.1%<break/>Arousal: 80.9%</td>
</tr>
<tr>
<td align="center">This Study</td>
<td align="center">PPG</td>
<td align="center">Experiment, DEAP</td>
<td align="center">Frequency Domain Features, PRV, Morphological Features</td>
<td align="center">(Experiment) Valence: 85.9%<break/>(Experiment) Arousal: 91.0%<break/>(DEAP) Valence: 75.9%<break/>(DEAP) Arousal: 79.3%</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Nevertheless, this study is subject to several limitations that warrant consideration. First, while we have established a tripartite framework connecting physiological parameters, PPG frequency-domain features, and emotional states, the current analysis primarily demonstrates their strong associations rather than establishing precise quantitative correlations. This limitation highlights the need for more sophisticated modeling approaches to quantify these relationships. Secondly, the selection of emotional elicitation materials presents inherent challenges. The material-specific characteristics sometimes exert a more substantial influence on the extracted features than the emotional states themselves. Additionally, the duration of stimulus materials significantly impacts feature selection and interpretation, a methodological concern that persists across numerous studies in this field. Finally, our research primarily focused on feature analysis and interpretation, with relatively less emphasis on optimization for classification accuracy. This methodological orientation, while providing valuable insights into feature characteristics, has resulted in classification performance that, while respectable, leaves room for improvement. Future studies should aim to strike a better balance between feature exploration and recognition performance optimization.</p>
</sec>
<sec sec-type="conclusion" id="s5">
<title>5 Conclusion</title>
<p>This study employs a physiology model-based simulation approach to systematically analyze the frequency-domain components of PPG signals and extract their essential characteristics. Through comprehensive investigation, we examine the efficacy of these frequency-domain features in effectively discriminating emotional states. Furthermore, the research elucidates the intricate relationships between physiological parameters, frequency-domain characteristics, and emotional states, thereby providing deeper insights into the psychophysiological mechanisms underlying emotion recognition. These findings establish a solid theoretical foundation and offer valuable references for subsequent research in this field.</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="s6">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec sec-type="ethics-statement" id="s7">
<title>Ethics statement</title>
<p>This study was performed in accordance with the Nuremberg Code. The studies involving humans were approved by the Zhejiang University Psychological Ethics Review (approval: [2022] No. 059). The studies were conducted in accordance with the local legislation and institutional requirements. All adult participants provided their written informed consent to participate in this study.</p>
</sec>
<sec sec-type="author-contributions" id="s8">
<title>Author contributions</title>
<p>ZZ: Conceptualization, Methodology, Resources, Software, Writing&#x2013;original draft. XW: Conceptualization, Investigation, Methodology, Validation, Writing&#x2013;original draft. YX: Formal Analysis, Validation, Writing&#x2013;review and editing. WC: Resources, Validation, Writing&#x2013;review and editing. JZ: Supervision, Validation, Writing&#x2013;review and editing. SC: Conceptualization, Supervision, Writing&#x2013;review and editing. HC: Project administration, Resources, Supervision, Writing&#x2013;review and editing.</p>
</sec>
<sec sec-type="funding-information" id="s9">
<title>Funding</title>
<p>The author(s) declare that no financial support was received for the research, authorship, and/or publication of this article.</p>
</sec>
<ack>
<p>The authors would like to thank Yimin Shen, Laboratory leader of College of Biomedical Engineering and Instrument Sciences at Zhejiang University (ZJU, China) for kind help in this work.</p>
</ack>
<sec sec-type="COI-statement" id="s10">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="s11">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec id="s12">
<title>Supplementary material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fphys.2025.1486763/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fphys.2025.1486763/full&#x23;supplementary-material</ext-link>
</p>
<supplementary-material xlink:href="Table1.docx" id="SM1" mimetype="application/docx" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Abadi</surname>
<given-names>M. K.</given-names>
</name>
<name>
<surname>Subramanian</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Kia</surname>
<given-names>S. M.</given-names>
</name>
<name>
<surname>Avesani</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Patras</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Sebe</surname>
<given-names>N.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>DECAF: MEG-based multimodal database for decoding affective physiological responses</article-title>. <source>IEEE T. Affect. Comput.</source> <volume>6</volume>, <fpage>209</fpage>&#x2013;<lpage>222</lpage>. <pub-id pub-id-type="doi">10.1109/TAFFC.2015.2392932</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Alvarez-Jimenez</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Calle-Jimenez</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Hernandez-Alvarez</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>A comprehensive evaluation of features and simple machine learning algorithms for electroencephalographic-based emotion recognition</article-title>. <source>Appl. SCIENCES-BASEL</source> <volume>14</volume>, <fpage>2228</fpage>. <pub-id pub-id-type="doi">10.3390/app14062228</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Anthony</surname>
<given-names>A. A.</given-names>
</name>
<name>
<surname>Patil</surname>
<given-names>C. M.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Speech emotion recognition systems: a comprehensive review on different methodologies</article-title>. <source>Wirel. Pers. Commun.</source> <volume>130</volume>, <fpage>515</fpage>&#x2013;<lpage>525</lpage>. <pub-id pub-id-type="doi">10.1007/s11277-023-10296-5</pub-id>
</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Beckmann</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Viga</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Doganguen</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Grabmaier</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Measurement and analysis of local pulse transit time for emotion recognition</article-title>. <source>IEEE Sens. J.</source> <volume>19</volume>, <fpage>7683</fpage>&#x2013;<lpage>7692</lpage>. <pub-id pub-id-type="doi">10.1109/JSEN.2019.2915529</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bradley</surname>
<given-names>M. M.</given-names>
</name>
<name>
<surname>Lang</surname>
<given-names>P. J.</given-names>
</name>
</person-group> (<year>1994</year>). <article-title>Measuring emotion: the self-assessment Manikin and the semantic differential</article-title>. <source>J. Behav. Ther. Exp. Psychiatry</source> <volume>25</volume>, <fpage>49</fpage>&#x2013;<lpage>59</lpage>. <pub-id pub-id-type="doi">10.1016/0005-7916(94)90063-9</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Button</surname>
<given-names>K. S.</given-names>
</name>
<name>
<surname>Lewis</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Munafo</surname>
<given-names>M. R.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Understanding emotion: lessons from anxiety</article-title>. <source>Behav. Brain Sci.</source> <volume>35</volume>, <fpage>145</fpage>. <pub-id pub-id-type="doi">10.1017/S0140525X11001464</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chakraborty</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Sadhukhan</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Pal</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Mitra</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>PPG-Based automated estimation of blood pressure using patient-specific neural network modeling</article-title>. <source>J. Mech. Med. Biol.</source> <volume>20</volume>, <fpage>2050037</fpage>. <pub-id pub-id-type="doi">10.1142/S0219519420500372</pub-id>
</citation>
</ref>
<ref id="B54">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Choi</surname>
<given-names>E. J.</given-names>
</name>
<name>
<surname>Kim</surname>
<given-names>D. K.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Arousal and valence classification model based on long short-term memory and DEAP data for mental healthcare management</article-title>. <source>Healthc. Inform. Res.</source> <volume>24</volume> (<issue>4</issue>), <fpage>309</fpage>&#x2013;<lpage>316</lpage>. <pub-id pub-id-type="doi">10.4258/hir.2018.24.4.309</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Davydov</surname>
<given-names>D. M.</given-names>
</name>
<name>
<surname>Zech</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Luminet</surname>
<given-names>O.</given-names>
</name>
</person-group> (<year>2011</year>). <article-title>Affective context of sadness and physiological response patterns</article-title>. <source>J. Psychophysiol.</source> <volume>25</volume>, <fpage>67</fpage>&#x2013;<lpage>80</lpage>. <pub-id pub-id-type="doi">10.1027/0269-8803/a000031</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Dhara</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Singh</surname>
<given-names>P. K.</given-names>
</name>
<name>
<surname>Mahmud</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>A fuzzy ensemble-based deep learning model for EEG-based emotion recognition</article-title>. <source>Cogn. Comput.</source> <volume>16</volume>, <fpage>1364</fpage>&#x2013;<lpage>1378</lpage>. <pub-id pub-id-type="doi">10.1007/s12559-023-10171-2</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Drummond</surname>
<given-names>P. D.</given-names>
</name>
</person-group> (<year>1999</year>). <article-title>Facial flushing during provocation in women</article-title>. <source>Psychophysiology</source> <volume>36</volume>, <fpage>325</fpage>&#x2013;<lpage>332</lpage>. <pub-id pub-id-type="doi">10.1017/S0048577299980344</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ekman</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Friesen</surname>
<given-names>W. V.</given-names>
</name>
</person-group> (<year>1971</year>). <article-title>Constants across cultures in the face and emotion</article-title>. <source>J. Of Personality And Soc. Psychol.</source> <volume>17</volume>, <fpage>124</fpage>. <lpage>129</lpage>. <pub-id pub-id-type="doi">10.1037/h0030377</pub-id>
</citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fabian</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Ga&#xeb;l</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Alexandre</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Vincent</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Bertrand</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Olivier</surname>
<given-names>G.</given-names>
</name>
<etal/>
</person-group> (<year>2011</year>). <article-title>Scikit-learn: machine learning in Python</article-title>. <source>J. Mach. Learn. Res.</source> <volume>12</volume>, <fpage>2825</fpage>&#x2013;<lpage>2830</lpage>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://scikit-learn.org/stable/">https://scikit-learn.org/stable/</ext-link>
</comment>
</citation>
</ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fairclough</surname>
<given-names>S. H.</given-names>
</name>
<name>
<surname>van der Zwaag</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Spiridon</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Westerink</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Effects of mood induction via music on cardiovascular measures of negative emotion during simulated driving</article-title>. <source>Physiol. Behav.</source> <volume>129</volume>, <fpage>173</fpage>&#x2013;<lpage>180</lpage>. <pub-id pub-id-type="doi">10.1016/j.physbeh.2014.02.049</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gendolla</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Krusken</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2001</year>). <article-title>Mood state and cardiovascular response in active coping with an affect-regulative challenge</article-title>. <source>Int. J. Psychophysiol.</source> <volume>41</volume>, <fpage>169</fpage>&#x2013;<lpage>180</lpage>. <pub-id pub-id-type="doi">10.1016/S0167-8760(01)00130-1</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Goldwyn</surname>
<given-names>R. M.</given-names>
</name>
<name>
<surname>Watt</surname>
<given-names>T. B.</given-names>
</name>
</person-group> (<year>1967</year>). <article-title>Arterial pressure pulse contour analysis via a mathematical model for clinical quantification of human vascular properties</article-title>. <source>IEEE T. Bio.-Med. Eng.</source> <volume>BM14</volume>, <fpage>11</fpage>. <pub-id pub-id-type="doi">10.1109/TBME.1967.4502455</pub-id>
</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gordan</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Gwathmey</surname>
<given-names>J. K.</given-names>
</name>
<name>
<surname>Xie</surname>
<given-names>L. H.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Autonomic and endocrine control of cardiovascular function</article-title>. <source>World J. Cardiol.</source> <volume>7</volume>, <fpage>204</fpage>&#x2013;<lpage>214</lpage>. <pub-id pub-id-type="doi">10.4330/wjc.v7.i4.204</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Goshvarpour</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Abbasi</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Goshvarpour</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>An accurate emotion recognition system using ECG and GSR signals and matching pursuit method</article-title>. <source>Biomed. J.</source> <volume>40</volume>, <fpage>355</fpage>&#x2013;<lpage>368</lpage>. <pub-id pub-id-type="doi">10.1016/j.bj.2017.11.001</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gou</surname>
<given-names>X. Y.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>Y. X.</given-names>
</name>
<name>
<surname>Guo</surname>
<given-names>L. X.</given-names>
</name>
<name>
<surname>Zhao</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Zhong</surname>
<given-names>D. L.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>X. B.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>The conscious processing of emotion in depression disorder: a meta-analysis of neuroimaging studies</article-title>. <source>Front. Psychiatry</source> <volume>14</volume>, <fpage>1099426</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyt.2023.1099426</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Harms</surname>
<given-names>M. B.</given-names>
</name>
<name>
<surname>Martin</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Wallace</surname>
<given-names>G. L.</given-names>
</name>
</person-group> (<year>2010</year>). <article-title>Facial emotion recognition in autism spectrum disorders: a review of behavioral and neuroimaging studies</article-title>. <source>Neuropsychol. Rev.</source> <volume>20</volume>, <fpage>290</fpage>&#x2013;<lpage>322</lpage>. <pub-id pub-id-type="doi">10.1007/s11065-010-9138-6</pub-id>
</citation>
</ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Harris</surname>
<given-names>K. F.</given-names>
</name>
<name>
<surname>Matthews</surname>
<given-names>K. A.</given-names>
</name>
</person-group> (<year>2004</year>). <article-title>Interactions between autonomic nervous system activity and endothelial function: a model for the development of cardiovascular disease</article-title>. <source>Psychosom. Med.</source> <volume>66</volume>, <fpage>153</fpage>&#x2013;<lpage>164</lpage>. <pub-id pub-id-type="doi">10.1097/01.psy.0000116719.95524.e2</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hasan</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Rundensteiner</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Agu</surname>
<given-names>E.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Automatic emotion detection in text streams by analyzing Twitter data</article-title>. <source>Int. J. DATA Sci. Anal.</source> <volume>7</volume>, <fpage>35</fpage>&#x2013;<lpage>51</lpage>. <pub-id pub-id-type="doi">10.1007/s41060-018-0096-z</pub-id>
</citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hsu</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Chiang</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Hung</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Automatic ECG-based emotion recognition in music listening</article-title>. <source>IEEE T. Affect. Comput.</source> <volume>11</volume>, <fpage>85</fpage>&#x2013;<lpage>99</lpage>. <pub-id pub-id-type="doi">10.1109/TAFFC.2017.2781732</pub-id>
</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Issa</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Peng</surname>
<given-names>Q.</given-names>
</name>
<name>
<surname>You</surname>
<given-names>X.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Emotion classification using EEG brain signals and the broad learning system</article-title>. <source>IEEE T. Syst. Man. Cy.-S.</source> <volume>51</volume>, <fpage>7382</fpage>&#x2013;<lpage>7391</lpage>. <pub-id pub-id-type="doi">10.1109/TSMC.2020.2969686</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jang</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Park</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Kim</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Chung</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Park</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Sohn</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Data from: emotion classification based on bio-signals emotion recognition using machine learning algorithms</article-title>. <fpage>1373</fpage>, <lpage>1376</lpage>. <pub-id pub-id-type="doi">10.1109/InfoSEEE.2014.6946144</pub-id>
</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Johnstone</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>1971</year>). <article-title>The effects of oral sedatives on the vasoconstrictive reaction to fear</article-title>. <source>Brit. J. Anaesth.</source> <volume>43</volume>, <fpage>365</fpage>&#x2013;<lpage>379</lpage>. <pub-id pub-id-type="doi">10.1093/bja/43.4.365-a</pub-id>
</citation>
</ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Joshi</surname>
<given-names>V. M.</given-names>
</name>
<name>
<surname>Ghongade</surname>
<given-names>R. B.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>EEG based emotion detection using fourth order spectral moment and deep learning</article-title>. <source>Biomed. Signal Proces.</source> <volume>68</volume>, <fpage>102755</fpage>. <pub-id pub-id-type="doi">10.1016/j.bspc.2021.102755</pub-id>
</citation>
</ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kang</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Kim</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>1D convolutional autoencoder-based PPG and GSR signals for real-time emotion classification</article-title>. <source>IEEE Access</source> <volume>10</volume>, <fpage>91332</fpage>&#x2013;<lpage>91345</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2022.3201342</pub-id>
</citation>
</ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kleiger</surname>
<given-names>R. E.</given-names>
</name>
<name>
<surname>Stein</surname>
<given-names>P. K.</given-names>
</name>
<name>
<surname>Bigger</surname>
<given-names>J. J.</given-names>
</name>
</person-group> (<year>2005</year>). <article-title>Heart rate variability: measurement and clinical utility</article-title>. <source>Ann. Noninvasive Electrocardiol.</source> <volume>10</volume>, <fpage>88</fpage>&#x2013;<lpage>101</lpage>. <pub-id pub-id-type="doi">10.1111/j.1542-474X.2005.10101.x</pub-id>
</citation>
</ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Koelstra</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Muhl</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Soleymani</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Lee</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Yazdani</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Ebrahimi</surname>
<given-names>T.</given-names>
</name>
<etal/>
</person-group> (<year>2012</year>). <article-title>DEAP: a database for emotion analysis using physiological signals</article-title>. <source>IEEE T. Affect. Comput.</source> <volume>3</volume>, <fpage>18</fpage>&#x2013;<lpage>31</lpage>. <pub-id pub-id-type="doi">10.1109/T-AFFC.2011.15</pub-id>
</citation>
</ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Krumhansl</surname>
<given-names>C. L.</given-names>
</name>
</person-group> (<year>1997</year>). <article-title>An exploratory study of musical emotions and psychophysiology</article-title>. <source>Can. J. Exp. Psychol.</source> <volume>51</volume>, <fpage>336</fpage>&#x2013;<lpage>353</lpage>. <pub-id pub-id-type="doi">10.1037/1196-1961.51.4.336</pub-id>
</citation>
</ref>
<ref id="B31">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kulke</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Feyerabend</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Schacht</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>A comparison of the affectiva iMotions facial expression analysis software with EMG for identifying facial expressions of emotion</article-title>. <source>Front. Psychol.</source> <volume>11</volume>, <fpage>329</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyg.2020.00329</pub-id>
</citation>
</ref>
<ref id="B32">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lee</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Lee</surname>
<given-names>Y. K.</given-names>
</name>
<name>
<surname>Lim</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Kang</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Emotion recognition using convolutional neural network with selected statistical photoplethysmogram features</article-title>. <source>Appl. Sci.</source> <volume>10</volume>, <fpage>3501</fpage>. <pub-id pub-id-type="doi">10.3390/app10103501</pub-id>
</citation>
</ref>
<ref id="B53">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lee</surname>
<given-names>M. S.</given-names>
</name>
<name>
<surname>Lee</surname>
<given-names>Y. K.</given-names>
</name>
<name>
<surname>Pae</surname>
<given-names>D. S.</given-names>
</name>
<name>
<surname>Lim</surname>
<given-names>M. T.</given-names>
</name>
<name>
<surname>Kim</surname>
<given-names>D. W.</given-names>
</name>
<name>
<surname>Kang</surname>
<given-names>T. K.</given-names>
</name>
<etal/>
</person-group> (<year>2019</year>). <article-title>Fast emotion recognition based on single pulse PPG signal with convolutional neural network</article-title>. <source>Appl. Sci.</source> <volume>9</volume> (<issue>16</issue>), <fpage>3355</fpage>. <pub-id pub-id-type="doi">10.3390/app9163355</pub-id>
</citation>
</ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Xie</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Chai</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>Z.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>A feature-based on potential and differential entropy information for electroencephalogram emotion recognition</article-title>. <source>Electron. Lett.</source> <volume>58</volume>, <fpage>174</fpage>&#x2013;<lpage>177</lpage>. <pub-id pub-id-type="doi">10.1049/ell2.12388</pub-id>
</citation>
</ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Shi</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Differences in photoplethysmography morphological features and feature time series between two opposite emotions: happiness and sadness</article-title>. <source>Artery Res.</source> <volume>18</volume>, <fpage>7</fpage>. <pub-id pub-id-type="doi">10.1016/j.artres.2017.02.003</pub-id>
</citation>
</ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ma</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Ding</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>K.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Prediction of early improvement of major depressive disorder to antidepressant medication in adolescents with radiomics analysis after ComBat harmonization based on multiscale structural MRI</article-title>. <source>BMC Psychiatry</source> <volume>23</volume>, <fpage>466</fpage>. <pub-id pub-id-type="doi">10.1186/s12888-023-04966-8</pub-id>
</citation>
</ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Maria</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Matthias</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Sten</surname>
<given-names>H.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Emotion recognition from physiological signal analysis: a review</article-title>. <source>Electron. Notes Theor. Comput. Sci.</source> <volume>343</volume>, <fpage>35</fpage>&#x2013;<lpage>55</lpage>. <pub-id pub-id-type="doi">10.1016/j.entcs.2019.04.009</pub-id>
</citation>
</ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Nummenmaa</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Glerean</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Hari</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Hietanen</surname>
<given-names>J. K.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Bodily maps of emotions</article-title>. <source>Proc. Natl. Acad. Sci. U. S. A.</source> <volume>111</volume>, <fpage>646</fpage>&#x2013;<lpage>651</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.1321664111</pub-id>
</citation>
</ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ong</surname>
<given-names>A. D.</given-names>
</name>
<name>
<surname>Bergeman</surname>
<given-names>C. S.</given-names>
</name>
<name>
<surname>Bisconti</surname>
<given-names>T. L.</given-names>
</name>
<name>
<surname>Wallace</surname>
<given-names>K. A.</given-names>
</name>
</person-group> (<year>2006</year>). <article-title>Psychological resilience, positive emotions, and successful adaptation to stress in later life</article-title>. <source>J. Pers. Soc. Psychol.</source> <volume>91</volume>, <fpage>730</fpage>&#x2013;<lpage>749</lpage>. <pub-id pub-id-type="doi">10.1037/0022-3514.91.4.730</pub-id>
</citation>
</ref>
<ref id="B40">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Paul</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Chakraborty</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Sadhukhan</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Pal</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Mitra</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>A simplified PPG based approach for automated recognition of five distinct emotional states</article-title>. <source>Multimed. Tools Appl.</source> <volume>83</volume>, <fpage>30697</fpage>&#x2013;<lpage>30718</lpage>. <pub-id pub-id-type="doi">10.1007/s11042-023-16744-5</pub-id>
</citation>
</ref>
<ref id="B41">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rainville</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Bechara</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Naqvi</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Damasio</surname>
<given-names>A. R.</given-names>
</name>
</person-group> (<year>2006</year>). <article-title>Basic emotions are associated with distinct patterns of cardiorespiratory activity</article-title>. <source>Int. J. Psychophysiol.</source> <volume>61</volume>, <fpage>5</fpage>&#x2013;<lpage>18</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijpsycho.2005.10.024</pub-id>
</citation>
</ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rodellar-Biarge</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Palacios-Alonso</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Nieto-Lluis</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Gomez-Vilda</surname>
<given-names>P.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Towards the search of detection in speech-relevant features for stress</article-title>. <source>Expert Syst.</source> <volume>32</volume>, <fpage>710</fpage>&#x2013;<lpage>718</lpage>. <pub-id pub-id-type="doi">10.1111/exsy.12109</pub-id>
</citation>
</ref>
<ref id="B43">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rosner</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Grove</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>1999</year>). <article-title>Use of the Mann-Whitney U-test for clustered data</article-title>. <source>Stat. Med.</source> <volume>18</volume>, <fpage>1387</fpage>&#x2013;<lpage>1400</lpage>. <pub-id pub-id-type="doi">10.1002/(sici)1097-0258(19990615)18:11&#x3c;1387::aid-sim126&#x3e;3.0.co;2-v</pub-id>
</citation>
</ref>
<ref id="B44">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sarkar</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Etemad</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Self-supervised ECG representation learning for emotion recognition</article-title>. <source>IEEE T. Affect. Comput.</source> <volume>13</volume>, <fpage>1541</fpage>&#x2013;<lpage>1554</lpage>. <pub-id pub-id-type="doi">10.1109/TAFFC.2020.3014842</pub-id>
</citation>
</ref>
<ref id="B45">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sarma</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Barma</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Emotion recognition by distinguishing appropriate EEG segments based on random matrix theory</article-title>. <source>Biomed. Signal Proces.</source> <volume>70</volume>, <fpage>102991</fpage>. <pub-id pub-id-type="doi">10.1016/j.bspc.2021.102991</pub-id>
</citation>
</ref>
<ref id="B46">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sato</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Fujimura</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Suzuki</surname>
<given-names>N.</given-names>
</name>
</person-group> (<year>2008</year>). <article-title>Enhanced facial EMG activity in response to dynamic facial expressions</article-title>. <source>Int. J. Psychophysiol.</source> <volume>70</volume>, <fpage>70</fpage>&#x2013;<lpage>74</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijpsycho.2008.06.001</pub-id>
</citation>
</ref>
<ref id="B47">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shu</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Xie</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Liao</surname>
<given-names>D.</given-names>
</name>
<etal/>
</person-group> (<year>2018</year>). <article-title>A review of emotion recognition using physiological signals</article-title>. <source>Sensors-Basel</source> <volume>18</volume>, <fpage>2074</fpage>. <pub-id pub-id-type="doi">10.3390/s18072074</pub-id>
</citation>
</ref>
<ref id="B48">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Virtanen</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Gommers</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Oliphant</surname>
<given-names>T. E.</given-names>
</name>
<name>
<surname>Haberland</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Reddy</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Cournapeau</surname>
<given-names>D.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>SciPy 1.0: fundamental algorithms for scientific computing in Python</article-title>. <source>Nat. Methods</source> <volume>17</volume>, <fpage>261</fpage>&#x2013;<lpage>272</lpage>. <pub-id pub-id-type="doi">10.1038/s41592-019-0686-2</pub-id>
</citation>
</ref>
<ref id="B49">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Wang</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Yu</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2021</year>). &#x201c;<article-title>Emotion recognition based on photoplethysmography using ResNet and BiLSTM networks</article-title>,&#x201d; in <source>2021 International Conference on E-Health and Bioengineering (EHB 2021)</source>. <edition>9th Edition</edition>. <pub-id pub-id-type="doi">10.1109/EHB52898.2021.9657742</pub-id>
</citation>
</ref>
<ref id="B50">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname>
<given-names>X. Y.</given-names>
</name>
<name>
<surname>Zhou</surname>
<given-names>H. L.</given-names>
</name>
<name>
<surname>Xue</surname>
<given-names>W. C.</given-names>
</name>
<name>
<surname>Zhu</surname>
<given-names>Z. B.</given-names>
</name>
<name>
<surname>Jiang</surname>
<given-names>W. N.</given-names>
</name>
<name>
<surname>Feng</surname>
<given-names>J. W.</given-names>
</name>
<etal/>
</person-group> (<year>2022</year>). <article-title>The hybrid discrete-dimensional frame method for emotional film selection</article-title>. <source>Curr. Psychol.</source> <volume>42</volume>, <fpage>30077</fpage>&#x2013;<lpage>30092</lpage>. <pub-id pub-id-type="doi">10.1007/s12144-022-04038-2</pub-id>
</citation>
</ref>
<ref id="B51">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wen</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Cheng</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Wei</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Shangguan</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Huang</surname>
<given-names>W.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Emotion recognition based on multi-variant correlation of physiological signals</article-title>. <source>IEEE T. Affect. Comput.</source> <volume>5</volume>, <fpage>126</fpage>&#x2013;<lpage>140</lpage>. <pub-id pub-id-type="doi">10.1109/TAFFC.2014.2327617</pub-id>
</citation>
</ref>
<ref id="B52">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yang</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Shu</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Ji</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Ren</surname>
<given-names>Z.</given-names>
</name>
<etal/>
</person-group> (<year>2024</year>). <article-title>A multimodal dataset for mixed emotion recognition</article-title>. <source>Sci. Data</source> <volume>11</volume>, <fpage>847</fpage>. <pub-id pub-id-type="doi">10.1038/s41597-024-03676-4</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>