<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xml:lang="EN" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Hum. Neurosci.</journal-id>
<journal-title>Frontiers in Human Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Hum. Neurosci.</abbrev-journal-title>
<issn pub-type="epub">1662-5161</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnhum.2024.1467403</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Neuroscience</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Understanding emotional influences on sustained attention: a study using virtual reality and neurophysiological monitoring</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Shen</surname> <given-names>Yang</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1671110/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Zheng</surname> <given-names>Huijia</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/2719869/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Li</surname> <given-names>Yu</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Tian</surname> <given-names>Xuetao</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/2626698/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>China Basic Education Quality Monitoring Collaborative Innovation Center, Beijing Normal University</institution>, <addr-line>Beijing</addr-line>, <country>China</country></aff>
<aff id="aff2"><sup>2</sup><institution>Peabody College of Education and Human Development, Vanderbilt University</institution>, <addr-line>Nashville, TN</addr-line>, <country>United States</country></aff>
<aff id="aff3"><sup>3</sup><institution>Beijing Key Laboratory of Applied Experimental Psychology, National Demonstration Center for Experimental Psychology Education, Faculty of Psychology, Beijing Normal University</institution>, <addr-line>Beijing</addr-line>, <country>China</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Tao Xu, Northwestern Polytechnical University, China</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Shihui Guo, Xiamen University, China</p><p>Yi Feng, Central University of Finance and Economics, China</p></fn>
<corresp id="c001">&#x002A;Correspondence: Xuetao Tian, <email>xttian@bnu.edu.cn</email></corresp>
</author-notes>
<pub-date pub-type="epub">
<day>17</day>
<month>10</month>
<year>2024</year>
</pub-date>
<pub-date pub-type="collection">
<year>2024</year>
</pub-date>
<volume>18</volume>
<elocation-id>1467403</elocation-id>
<history>
<date date-type="received">
<day>23</day>
<month>07</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>30</day>
<month>09</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2024 Shen, Zheng, Li and Tian.</copyright-statement>
<copyright-year>2024</copyright-year>
<copyright-holder>Shen, Zheng, Li and Tian</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license>
</permissions>
<abstract>
<sec>
<title>Introduction</title>
<p>Emotion and attention regulation significantly influence various aspects of human functioning and behavior. However, the interaction between emotion and attention in affecting performance remains underexplored. This study aims to investigate how sustained attention, subject to individual differences, is influenced by varying emotional states.</p>
</sec>
<sec>
<title>Methods</title>
<p>A total of 12 participants underwent emotion induction through Virtual Reality (VR) videos; completed an AX-CPT (continuous performance test) task to measure sustained attention, for which task performance is evaluated from two aspects, task accuracy and task reaction times; and reported their flow states. EEG and PPG data were collected throughout the sessions, as supporting evidence for sustained attention.</p>
</sec>
<sec>
<title>Results</title>
<p>Our findings suggest that emotional valence and arousal significantly influence task reaction times and sustained attention, when gender differences are accounted for, but do not significantly impact task accuracy. Specifically, males responded faster under high-arousal negative emotions, while females responded faster under high-arousal positive emotions. Additionally, we find that flow experience is not significantly impacted by emotional states or sustained attention.</p>
</sec>
<sec>
<title>Discussion</title>
<p>The study underscores the nuanced interplay between emotion, sustained attention, and task performance, suggesting that emotional states can differentially impact cognitive processes. Also, it supports the use of VR, EEG, and PPG technologies in future research on related topics. Future research could expand upon this study by including larger sample sizes and a wider range of emotional inductions to generalize the findings.</p>
</sec>
</abstract>
<kwd-group>
<kwd>sustained attention</kwd>
<kwd>emotion</kwd>
<kwd>virtual reality</kwd>
<kwd>electroencephalogram</kwd>
<kwd>photoplethysmography</kwd>
</kwd-group>
<contract-num rid="cn001">62207002</contract-num>
<contract-num rid="cn001">62307003</contract-num>
<contract-sponsor id="cn001">National Natural Science Foundation of China<named-content content-type="fundref-id">10.13039/501100001809</named-content></contract-sponsor>
<counts>
<fig-count count="4"/>
<table-count count="9"/>
<equation-count count="0"/>
<ref-count count="36"/>
<page-count count="10"/>
<word-count count="5763"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Cognitive Neuroscience</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="S1" sec-type="intro">
<title>1 Introduction</title>
<p>Emotion and attention regulation both play important roles in adaptive functioning and behavior (<xref ref-type="bibr" rid="B24">Pekrun et al., 2002</xref>; <xref ref-type="bibr" rid="B35">Westphal et al., 2018</xref>; <xref ref-type="bibr" rid="B36">Whitehill et al., 2014</xref>). In daily life, we are constantly faced with large amounts of information, which we use attention to filter and process (<xref ref-type="bibr" rid="B6">Brosch et al., 2013</xref>); emotions, in turn, exert an effect upon our attention profiles, changing the portion of information that we attend to, thus modulating behavior (<xref ref-type="bibr" rid="B23">Mitchell, 2022</xref>). Recently, the interaction between emotion and attention has come into focus in the fields of education and psychology (<xref ref-type="bibr" rid="B34">Wass et al., 2021</xref>; <xref ref-type="bibr" rid="B2">Baykal, 2022</xref>), as the specific process in which they impact performance remains unclear. In related research, a commonly applied theory of emotion is the dimensional theory, which distinguishes emotions based on their positions in a continuous multi-dimensional space, characterized by two primary dimensions: valence (degree of positivity or negativity of the emotion) and arousal (physiological activation of the emotion, high to low) (<xref ref-type="bibr" rid="B27">Russell, 1980</xref>). Existing research has shown that emotional arousal and valence can modulate attention allocation and selection (<xref ref-type="bibr" rid="B7">Compton, 2003</xref>; <xref ref-type="bibr" rid="B25">Phelps et al., 2006</xref>). However, findings on the specific relationship between different emotional dimensions and attention have yet to concur. Some studies indicate that high-arousal emotions increase attention directed to high-priority stimuli, but decrease attention toward low-priority stimuli (<xref ref-type="bibr" rid="B22">Mather and Sutherland, 2011</xref>). 
Other studies have identified that negative emotions can lead to global attention, whereas positive emotions lead to more local attention (<xref ref-type="bibr" rid="B11">Gasper and Clore, 2002</xref>). Further complicating the picture, arousal and valence also interact to produce varied effects on task performance, possibly related to their moderation on attention. For instance, research found that low-arousal negative affect enhances target recognition accuracy, high-arousal negative affect lowers target accuracy, while positive affect&#x2019;s influence on target accuracy does not differ significantly with different levels of arousal (<xref ref-type="bibr" rid="B15">Jefferies et al., 2008</xref>).</p>
<p>Hence, it seems necessary to unify these research outcomes into a more comprehensive picture. In this study, we use task-specific sustained attention, i.e., the ability to maintain focus on the experiment task, as a bridging factor between attention and task performance, while also taking individual differences into consideration, in hopes for explicating the influence of different emotional dimensions on attention and task performance. This is measured through reaction time and accuracy performance on an AX-CPT (continuous performance test) task. In order to further clarify the picture, we also take one step forward from existing research by employing new methodologies to improve validity. Immersive Virtual Reality (VR) technology is used to enhance the ecological validity of emotion induction, in comparison to traditional induction measures, while sustained attention is additionally assessed through objective measures including electroencephalogram (EEG) and photoplethysmography (PPG), which could likely reflect sustained attention more directly than common subjective/indirect measures. To this end, we also integrate prior findings on physiological response to task activation, to form a set of signals that could be representative of sustained attention. All in all, our primary goal for this study is to explore how different valence and arousal of emotions impact sustained attention, as demonstrated by EEG and PPG data. Along the way, we also validate the application of VR, EEG and PPG technology in future related studies.</p>
<p>In line with this, we test a secondary hypothesis as well. The flow state is a subjective experience of effortless concentration (<xref ref-type="bibr" rid="B8">Csikszentmihalyi, 1975</xref>). Research on its relationship with attention has yielded inconsistent results: some studies suggested that people who frequently experience the flow state show more sustained attention (<xref ref-type="bibr" rid="B30">Swann et al., 2012</xref>), while others found no significant relationship between flow and sustained attention, since flow is an automatic, unconscious process, while sustained attention requires effort (<xref ref-type="bibr" rid="B20">Marty-Dugas and Smilek, 2019</xref>; <xref ref-type="bibr" rid="B28">Schiefele and Raabe, 2011</xref>; <xref ref-type="bibr" rid="B32">Ullen et al., 2012</xref>). These conflicts bring up our secondary hypothesis: Is experiencing the flow state related to sustained attention and emotion valence/arousal? If participants tend to experience flow states more frequently during certain attention/emotion states, this may also lead to differential task performance, and thus produce confused results (<xref ref-type="bibr" rid="B13">Harris et al., 2021</xref>). This hypothesis serves to resolve one more factor that may confound the effect of different emotion states on attention and task performance.</p>
</sec>
<sec id="S2" sec-type="materials|methods">
<title>2 Materials and methods</title>
<sec id="S2.SS1">
<title>2.1 Stimuli, paradigms and equipment</title>
<p>This study aims to investigate the impact of different emotional states on sustained attention, with a side focus on flow. The necessary instruments are detailed below.</p>
<p>Immersive VR videos were utilized to induce different emotional states, presented in an HTC Vive Pro HMD. These were selected from the Stanford Immersive Virtual Reality Video Database (<xref ref-type="bibr" rid="B19">Li et al., 2017</xref>), which contains 73 VR clips categorized according to emotion valence and arousal (<xref ref-type="bibr" rid="B27">Russell, 1980</xref>). In the end, four videos with scores closest to the quadrant extremes of the valence and arousal dimensions were chosen to achieve optimal emotion induction, as shown in <xref ref-type="fig" rid="F1">Figure 1</xref>.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption><p>Screenshots of four VR videos used for emotion induction, including &#x201C;negative-valence, high-arousal,&#x201D; &#x201C;positive-valence, high-arousal,&#x201D; &#x201C;negative-valence, low-arousal,&#x201D; and &#x201C;positive-valence, low-arousal.&#x201D; Images reproduced from <xref ref-type="bibr" rid="B19">Li et al. (2017)</xref>.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-18-1467403-g001.tif"/>
</fig>
<p>A subjective emotion self-report scale, the Self-Assessment Manikin (SAM; <xref ref-type="bibr" rid="B3">Bradley and Lang, 1994</xref>), was used to evaluate participants&#x2019; emotion arousal and valence. The AX-CPT (continuous performance test) paradigm was used to assess sustained attention. In AX-CPT, participants are instructed to respond to letter sequences, with &#x201C;X&#x201D; as the target stimulus, but only if preceded by the letter &#x201C;A&#x201D;. The sequences include four types: AX (target), AY, BX, and BY, where &#x201C;B&#x201D; can be any letter other than &#x201C;A,&#x201D; and &#x201C;Y&#x201D; can be any letter other than &#x201C;X.&#x201D; The classic AX-CPT paradigm comprised 70% AX sequences and 10% each of AY, BX, and BY sequences (<xref ref-type="bibr" rid="B5">Braver et al., 2007</xref>). Participants&#x2019; accuracy and reaction times were recorded during the task as performance criteria. The Flow Short Scale (<xref ref-type="bibr" rid="B10">Engeser and Rheinberg, 2008</xref>), a 10-item Likert scale, was used to measure flow experience during the AX-CPT task.</p>
<p>During the AX-CPT task, a Shimmer3 wearable device was used for PPG data collection, and an ANT Neuro system was used for EEG data collection, as shown in <xref ref-type="fig" rid="F2">Figure 2</xref>.</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption><p>Experimental equipment.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-18-1467403-g002.tif"/>
</fig>
</sec>
<sec id="S2.SS2">
<title>2.2 Participants</title>
<p>A total of twelve participants were recruited for this study, 6 male and 6 female, aged 21 to 45 years (mean age = 31.2), right-handed and with normal or corrected-to-normal vision. Fields of expertise of the participants include computer science, education, psychology, foreign languages, marketing, and applied chemistry. All participants had no prior experience with similar experiments.</p>
</sec>
<sec id="S2.SS3">
<title>2.3 Procedure</title>
<p>The experiment was conducted one participant at a time without time limit, guided by a trained researcher in a quiet room and under constant screen brightness. Before the experiment, participants were introduced to the study, asked to sign a consent form, prepared for EEG and PPG collection, and instructed to fill out a demographic questionnaire covering gender, age, occupation, handedness, as well as previous VR experience. Then, participants were asked to stay stationary for 5 min, where a baseline SAM score was acquired. The experiment consists of four sessions, each with five steps, as shown in <xref ref-type="fig" rid="F3">Figure 3</xref>. First, participants were randomly assigned to one of four conditions, each representing one of the four emotional dimensions: negative-valence high-arousal, negative-valence low-arousal, positive-valence high-arousal, or positive-valence low-arousal, to receive corresponding emotion induction through a VR video approximately 3 min and 30 s long. Afterwards, participants filled out the SAM scale, completed the AX-CPT attention task, and filled out the Flow Short Scale. Finally, participants take a 5-min rest before the next session.</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption><p>The steps in one session of the experiment.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-18-1467403-g003.tif"/>
</fig>
</sec>
<sec id="S2.SS4">
<title>2.4 Electroencephalogram (EEG) pre-processing</title>
<p>Before EEG data analysis, preprocessing was performed with EEGLAB (<xref ref-type="bibr" rid="B9">Delorme and Makeig, 2004</xref>), an open-source toolbox based on MATLAB, during which a band-pass FIR filter from 3 Hz to 47 Hz was applied.</p>
</sec>
<sec id="S2.SS5">
<title>2.5 Photoplethysmography (PPG) data</title>
<p>This study analyzed Heart Rate Variability (HRV), as calculated from PPG, to reflect autonomic responses that result from the sustained attention task. HRV analysis generally consists of two aspects, time-domain features and frequency-domain features.</p>
<p>Time-domain features reflect heart rate variability and autonomic regulation, including:</p>
<list list-type="simple">
<list-item>
<label>&#x2022;</label>
<p>MEANRR: Mean reciprocal of RR intervals, indicating heart rate stability, where RR interval is the time interval between two peaks.</p>
</list-item>
<list-item>
<label>&#x2022;</label>
<p>MEDIANRR: Median RR intervals, providing a robust measure against outliers.</p>
</list-item>
<list-item>
<label>&#x2022;</label>
<p>MEANHR: Mean heart rate, linked to cardiovascular health and autonomic function.</p>
</list-item>
<list-item>
<label>&#x2022;</label>
<p>SDNN: Standard deviation of NN intervals, reflecting overall HRV, where NN interval is the normal heartbeat interval.</p>
</list-item>
<list-item>
<label>&#x2022;</label>
<p>RMSSD: Root mean square of successive differences, indicating parasympathetic activity.</p>
</list-item>
<list-item>
<label>&#x2022;</label>
<p>pNN50: Percentage of NN intervals with differences over 50 ms, reflecting parasympathetic activity.</p>
</list-item>
</list>
<p>Frequency-domain features, which primarily reflect parasympathetic activity (<xref ref-type="bibr" rid="B18">Kumar et al., 2023</xref>), were analyzed using resampling, interpolation, and Fast Fourier Transform (FFT), to obtain Power Spectral Density (PSD). The final features include:</p>
<list list-type="simple">
<list-item>
<label>&#x2022;</label>
<p>VLF: Very low frequency (0.0033&#x2013;0.04 Hz), associated with long-term regulatory mechanisms.</p>
</list-item>
<list-item>
<label>&#x2022;</label>
<p>LF: Low frequency (0.04&#x2013;0.15 Hz), reflecting sympathetic and parasympathetic activity.</p>
</list-item>
<list-item>
<label>&#x2022;</label>
<p>HF: High frequency (0.15&#x2013;0.4 Hz), indicating parasympathetic (vagal) regulation.</p>
</list-item>
<list-item>
<label>&#x2022;</label>
<p>LF/HF Ratio: Evaluation on the balance between sympathetic and parasympathetic activity.</p>
</list-item>
<list-item>
<label>&#x2022;</label>
<p>Normalized LF (LF[n.u.]) and HF (HF[n.u.]): Relative power in their respective ranges, providing comparative importance via transforming absolute power to normalized units.</p>
</list-item>
</list>
<p>Absolute Power VLF (VLF[abs]), LF (LF[abs]), and HF (HF[abs]): Reflect the energy distribution in their respective frequency ranges, related to specific physiological mechanisms.</p>
</sec>
</sec>
<sec id="S3" sec-type="results">
<title>3 Results</title>
<sec id="S3.SS1">
<title>3.1 Emotion induction</title>
<p>Due to the small sample size, we choose a non-parametric test, the Wilcoxon signed-rank test, to analyze differences in accuracy and reaction times in the AX-CPT task under different emotional states. Mean SAM scores among 12 participants are calculated for the four virtual reality videos. Firstly, we check to ensure that the emotion induction took effect. Results are presented in <xref ref-type="table" rid="T1">Table 1</xref>. Wilcoxon signed-rank tests showed significant differences in valence (<italic>Z</italic> = 3.07, <italic>p</italic> = 0.002) and arousal scores (<italic>Z</italic> = 3.01, <italic>p</italic> = 0.003) between corresponding videos. The virtual reality videos effectively elicited corresponding emotional responses.</p>
<table-wrap position="float" id="T1">
<label>TABLE 1</label>
<caption><p>Valence and arousal scores from SAM scale.</p></caption>
<table cellspacing="5" cellpadding="5" frame="box" rules="all">
<thead>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;">Emotion dimension</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Valence (M &#x00B1; SD)</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Arousal (M &#x00B1; SD)</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Negative-valence, low-arousal</td>
<td valign="top" align="center">3.17 &#x00B1; 1.80</td>
<td valign="top" align="center">4.17 &#x00B1; 2.17</td>
</tr>
<tr>
<td valign="top" align="left">Negative-valence, high-arousal</td>
<td valign="top" align="center">2.25 &#x00B1; 1.66</td>
<td valign="top" align="center">8.75 &#x00B1; 1.29</td>
</tr>
<tr>
<td valign="top" align="left">Positive-valence, high-arousal</td>
<td valign="top" align="center">7.08 &#x00B1; 1.62</td>
<td valign="top" align="center">6.42 &#x00B1; 1.83</td>
</tr>
<tr>
<td valign="top" align="left">Positive-valence, low-arousal</td>
<td valign="top" align="center">7.42 &#x00B1; 1.08</td>
<td valign="top" align="center">5.00 &#x00B1; 2.17</td>
</tr>
</tbody>
</table></table-wrap>
</sec>
<sec id="S3.SS2">
<title>3.2 Emotion and AX-CPT task data</title>
<p>Using the Wilcoxon signed-rank test, we analyze accuracy rates and reaction times during the AX-CPT task under the four emotional dimensions, and consider the influence of gender on the results.</p>
<p>As reported in <xref ref-type="table" rid="T2">Table 2</xref>, accuracy rates for positive emotions (<italic>M</italic> = 98.33, <italic>SD</italic> = 1.13, <italic>Mdn</italic> = 98.25) and negative emotions (<italic>M</italic> = 98.50, <italic>SD</italic> = 1.19, <italic>Mdn</italic> = 98.75) show no significant difference (<italic>Z</italic> = 0.49, <italic>p</italic> = 0.620). Accuracy rates for high-arousal emotions (<italic>M</italic> = 98.29, <italic>SD</italic> = 1.01, <italic>Mdn</italic> = 98.50) and low-arousal emotions (<italic>M</italic> = 98.54, <italic>SD</italic> = 1.16, <italic>Mdn</italic> = 98.75) also show no significant difference (<italic>Z</italic> = 1.08, <italic>p</italic> = 0.280). Also, reaction times for positive emotions (<italic>M</italic> = 30098.58, <italic>SD</italic> = 5758.64, <italic>Mdn</italic> = 29501.00) and negative emotions (<italic>M</italic> = 28465.17, <italic>SD</italic> = 5166.93, <italic>Mdn</italic> = 28273.00) show no significant difference (<italic>Z</italic> = 1.73, <italic>p</italic> = 0.084). Similarly, reaction times for high-arousal (<italic>M</italic> = 29061.67, <italic>SD</italic> = 4897.09, <italic>Mdn</italic> = 29341.00) and low-arousal emotions (<italic>M</italic> = 29502.08, <italic>SD</italic> = 5712.97, <italic>Mdn</italic> = 28618.00) show no significant difference (<italic>Z</italic> = 0.784, <italic>p</italic> = 0.433).</p>
<table-wrap position="float" id="T2">
<label>TABLE 2</label>
<caption><p>Influence of emotion valence and arousal on AX-CPT task performance.</p></caption>
<table cellspacing="5" cellpadding="5" frame="box" rules="all">
<thead>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;">Emotion dimension</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Reaction time (M &#x00B1; SD)</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Accuracy (M &#x00B1; SD)</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Positive-valence</td>
<td valign="top" align="center">30098.58 &#x00B1; 5758.64</td>
<td valign="top" align="center">98.33 &#x00B1; 1.14</td>
</tr>
<tr>
<td valign="top" align="left">Negative-valence</td>
<td valign="top" align="center">28465.17 &#x00B1; 5166.93</td>
<td valign="top" align="center">98.50 &#x00B1; 1.19</td>
</tr>
<tr>
<td valign="top" align="left">High-arousal</td>
<td valign="top" align="center">29061.67 &#x00B1; 4897.09</td>
<td valign="top" align="center">98.29 &#x00B1; 1.01</td>
</tr>
<tr>
<td valign="top" align="left">Low-arousal</td>
<td valign="top" align="center">29502.08 &#x00B1; 5712.97</td>
<td valign="top" align="center">98.54 &#x00B1; 1.16</td>
</tr>
</tbody>
</table></table-wrap>
<p>Further, as shown in <xref ref-type="table" rid="T3">Table 3</xref>, analyzing the influence of emotional valence, arousal, and gender on accuracy using the Wilcoxon signed-rank test revealed no significant differences (<italic>Z</italic> = 0.68, <italic>p</italic> = 0.500) in accuracy rates between males under high-arousal negative (<italic>M</italic> = 97.67, <italic>SD</italic> = 1.63, <italic>Mdn</italic> = 97.50) and high-arousal positive emotions (<italic>M</italic> = 98.33, <italic>SD</italic> = 1.37, <italic>Mdn</italic> = 98.00). Also, as shown in <xref ref-type="table" rid="T4">Table 4</xref>, for females, accuracy rates under high-arousal positive emotions (<italic>M</italic> = 98.00, <italic>SD</italic> = 1.55, <italic>Mdn</italic> = 98.00) and low-arousal positive emotions (<italic>M</italic> = 99.17, <italic>SD</italic> = 0.98, <italic>Mdn</italic> = 99.50) show no significant difference (<italic>Z</italic> = 1.63, <italic>p</italic> = 0.100). In comparison, for reaction time, males exhibit significantly shorter reaction times (<italic>Z</italic> = 2.20, <italic>p</italic> = 0.028, Cohen&#x2019;s <italic>d</italic> = 1.16) under high-arousal negative emotions (<italic>M</italic> = 26449.17, <italic>SD</italic> = 4046.69, <italic>Mdn</italic> = 26046.00) compared to high-arousal positive emotions (<italic>M</italic> = 27511.17, <italic>SD</italic> = 3594.44, <italic>Mdn</italic> = 30373.00). For females, reaction times are significantly shorter (<italic>Z</italic> = 1.99, <italic>p</italic> = 0.046, Cohen&#x2019;s <italic>d</italic> = 0.79) under high-arousal positive emotions (<italic>M</italic> = 30362.50, <italic>SD</italic> = 5326.55) compared to low-arousal positive emotions (<italic>M</italic> = 33004.17, <italic>SD</italic> = 8145.73).</p>
<table-wrap position="float" id="T3">
<label>TABLE 3</label>
<caption><p>Influence of emotion on AX-CPT task performance for males.</p></caption>
<table cellspacing="5" cellpadding="5" frame="box" rules="all">
<thead>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;">Emotion dimension</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Reaction time (M &#x00B1; SD)</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Accuracy (M &#x00B1; SD)</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Positive-valence, high-arousal</td>
<td valign="top" align="center">29183.00 &#x00B1; 4295.00</td>
<td valign="top" align="center">98.33 &#x00B1; 1.37</td>
</tr>
<tr>
<td valign="top" align="left">Positive-valence, low-arousal</td>
<td valign="top" align="center">27844.00 &#x00B1; 5833.00</td>
<td valign="top" align="center">97.83 &#x00B1; 1.72</td>
</tr>
<tr>
<td valign="top" align="left">Negative-valence, high-arousal</td>
<td valign="top" align="center">26449.00 &#x00B1; 4047.00</td>
<td valign="top" align="center">97.67 &#x00B1; 1.63</td>
</tr>
<tr>
<td valign="top" align="left">Negative-valence, low-arousal</td>
<td valign="top" align="center">27511.00 &#x00B1; 3594.00</td>
<td valign="top" align="center">98.17 &#x00B1; 1.33</td>
</tr>
</tbody>
</table></table-wrap>
<table-wrap position="float" id="T4">
<label>TABLE 4</label>
<caption><p>Influence of emotion on AX-CPT task performance for females.</p></caption>
<table cellspacing="5" cellpadding="5" frame="box" rules="all">
<thead>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;">Emotion dimension</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Reaction time (M &#x00B1; SD)</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Accuracy (M &#x00B1; SD)</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Positive-valence, high-arousal</td>
<td valign="top" align="center">30362.50 &#x00B1; 5326.55</td>
<td valign="top" align="center">98.00 &#x00B1; 1.55</td>
</tr>
<tr>
<td valign="top" align="left">Positive-valence, low-arousal</td>
<td valign="top" align="center">33004.17 &#x00B1; 8145.73</td>
<td valign="top" align="center">99.17 &#x00B1; 0.98</td>
</tr>
<tr>
<td valign="top" align="left">Negative-valence, high-arousal</td>
<td valign="top" align="center">30251.67 &#x00B1; 6598.45</td>
<td valign="top" align="center">99.17 &#x00B1; 1.17</td>
</tr>
<tr>
<td valign="top" align="left">Negative-valence, low-arousal</td>
<td valign="top" align="center">29648.67 &#x00B1; 6490.87</td>
<td valign="top" align="center">99.00 &#x00B1; 1.10</td>
</tr>
</tbody>
</table></table-wrap>
<p>These results suggest that emotional valence and arousal do not significantly affect task accuracy performance across different genders, but significantly impact reaction time performance. Males display shorter reaction times, i.e., better sustained attention, under high-arousal negative emotions than high-arousal positive emotions, while females display shorter reaction times under high-arousal positive emotions than low-arousal positive emotions. These results correspond with previous studies such as <xref ref-type="bibr" rid="B4">Bradley et al. (2001)</xref>, which found males to show greater physiological reactivity toward negative emotions, and females to show greater reactivity toward positive emotions.</p>
</sec>
<sec id="S3.SS3">
<title>3.3 Emotion and flow experience data</title>
<p>Flow experience scores are displayed in <xref ref-type="table" rid="T5">Table 5</xref>. The Wilcoxon signed-rank test is used to assess differences in flow experience across the four sessions of emotional dimensions. Results indicate that emotional valence (positive: <italic>M</italic> = 52.46, <italic>SD</italic> = 9.17, <italic>Mdn</italic> = 50.75 and negative: <italic>M</italic> = 55.71, <italic>SD</italic> = 9.41, <italic>Mdn</italic> = 57.50) and arousal (high: <italic>M</italic> = 54.58, <italic>SD</italic> = 8.68, <italic>Mdn</italic> = 55.25 and low: <italic>M</italic> = 53.58, <italic>SD</italic> = 9.34, <italic>Mdn</italic> = 52.25) do not pose significant impact on flow experience during the AX-CPT (valence: <italic>Z</italic> = 1.49, <italic>p</italic> = 0.136 and arousal: <italic>Z</italic> = 0.45, <italic>p</italic> = 0.656). When accounting for gender, no significant difference (<italic>Z</italic> = 0.32, <italic>p</italic> = 0.750) in flow experience is found for males between high-arousal negative emotions (<italic>M</italic> = 50.67, <italic>SD</italic> = 10.29, <italic>Mdn</italic> = 50.50) and high-arousal positive emotions (<italic>M</italic> = 51.50, <italic>SD</italic> = 3.67, <italic>Mdn</italic> = 50.00). Nor are significant differences (<italic>Z</italic> = 0.68, <italic>p</italic> = 0.500) found for females between high-arousal positive emotion (<italic>M</italic> = 54.67, <italic>SD</italic> = 12.45, <italic>Mdn</italic> = 54.50) and low-arousal positive emotion (<italic>M</italic> = 56.00, <italic>SD</italic> = 11.35, <italic>Mdn</italic> = 56.00) conditions.</p>
<table-wrap position="float" id="T5">
<label>TABLE 5</label>
<caption><p>Influence of emotion on flow experience.</p></caption>
<table cellspacing="5" cellpadding="5" frame="box" rules="all">
<thead>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;">Emotion dimension</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Flow experience score (M &#x00B1; SD)</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Positive-valence</td>
<td valign="top" align="center">52.46 &#x00B1; 9.17</td>
</tr>
<tr>
<td valign="top" align="left">Negative-valence</td>
<td valign="top" align="center">55.71 &#x00B1; 9.41</td>
</tr>
<tr>
<td valign="top" align="left">High-arousal</td>
<td valign="top" align="center">54.58 &#x00B1; 8.68</td>
</tr>
<tr>
<td valign="top" align="left">Low-arousal</td>
<td valign="top" align="center">53.58 &#x00B1; 9.33</td>
</tr>
</tbody>
</table></table-wrap>
</sec>
<sec id="S3.SS4">
<title>3.4 EEG data analysis</title>
<p>Differential brain activity due to variations in attention are commonly reflected in the frequency bands (<xref ref-type="bibr" rid="B33">Wang et al., 2011</xref>). Here, we extract and compare frequency domain characteristics, specifically Power Spectral Density (PSD), to reflect the changes of sustained attention in relation to emotion. PSD describes the distribution of signal power across frequencies. We calculate the PSD of &#x03B1;, &#x03B2;, and &#x03B3; bands, as well as the sustained attention formula <inline-formula><mml:math id="INEQ102"><mml:mfrac><mml:mi mathvariant="normal">&#x03B2;</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi><mml:mo>+</mml:mo><mml:mi mathvariant="normal">&#x03B8;</mml:mi></mml:mrow></mml:mfrac></mml:math></inline-formula>, using the Welch method from the Python-MNE toolkit (<xref ref-type="bibr" rid="B12">Gramfort et al., 2013</xref>). In detail, the Welch method divides the signal into <italic>n</italic> segments that allow overlap, which improves the signal&#x2019;s variance properties (<xref ref-type="bibr" rid="B29">Solomon, 1991</xref>), windows the data, and computes the average PSD of multiple segments. In particular, the Hanning window is chosen for windowing to mitigate spectral distortion caused by rectangular windows (<xref ref-type="bibr" rid="B14">Harris, 1978</xref>). Additionally, baseline correction is performed by subtracting PSD values from the baseline phase, i.e. the first 5 min of each session, from the PSD values of each session. Finally, these PSD values are averaged over &#x03B1; (7&#x2013;13 Hz), &#x03B2; (14&#x2013;29 Hz), and &#x03B3; (30&#x2013;47 Hz) bands, to be used for analysis.</p>
<p>For the sake of analysis, high and low sustained attention emotion conditions are divided based on AX-CPT task performance, where only the groups with significant differences are retained, resulting in 12 samples with high sustained attention and 12 samples with low sustained attention. Meanwhile, normality tests are conducted for EEG power in &#x03B1;, &#x03B2;, &#x03B3; bands, and <inline-formula><mml:math id="INEQ103"><mml:mfrac><mml:mi mathvariant="normal">&#x03B2;</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi><mml:mo>+</mml:mo><mml:mi mathvariant="normal">&#x03B8;</mml:mi></mml:mrow></mml:mfrac></mml:math></inline-formula> ratio, for all subjects in each channel. The results suggest that the data does not have normality. Therefore, we use the Mann-Whitney U test to compare power differences between different sustained attention levels. We found the following patterns in EEG data that could represent sustained attention during the task, as shown in <xref ref-type="fig" rid="F4">Figure 4</xref>, where the red circle represents the channels with significant differences between groups.</p>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption><p>Patterns in EEG data that could represent sustained attention, for EEG power in &#x03B1;, &#x03B2;, &#x03B3; bands, and <inline-formula><mml:math id="INEQ69"><mml:mfrac><mml:mi mathvariant="normal">&#x03B2;</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi><mml:mo>+</mml:mo><mml:mi mathvariant="normal">&#x03B8;</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mfrac></mml:math></inline-formula> ratio.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-18-1467403-g004.tif"/>
</fig>
<p>As shown in <xref ref-type="table" rid="T6">Table 6</xref> and <xref ref-type="fig" rid="F4">Figure 4a</xref>, the analysis of &#x03B1; band indicates that power in the C3 channel is significantly lower in high sustained attention state compared to low sustained attention state. This concurs with prior research, which found &#x03B1; wave activity in the C3 to be generally implicated in relaxation (<xref ref-type="bibr" rid="B16">Klimesch, 1999</xref>), meaning that decrease in activity can indicate heightened attention.</p>
<table-wrap position="float" id="T6">
<label>TABLE 6</label>
<caption><p>Channels with significant differences in &#x03B1; band power by attention state.</p></caption>
<table cellspacing="5" cellpadding="5" frame="box" rules="all">
<thead>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;">Freq.</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Channel</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Sample size</td>
<td valign="top" align="center" colspan="2" style="color:#ffffff;background-color: #7f8080;">High</td>
<td valign="top" align="center" colspan="2" style="color:#ffffff;background-color: #7f8080;">Low</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">U</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"><italic>p</italic></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Cohen&#x2019;s <italic>d</italic></td>
</tr>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Mean</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">SD</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Mean</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">SD</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">&#x03B1;</td>
<td valign="top" align="center">C3</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0093</td>
<td valign="top" align="center">0.0017</td>
<td valign="top" align="center">0.0107</td>
<td valign="top" align="center">0.0140</td>
<td valign="top" align="center">38.0</td>
<td valign="top" align="center">0.050<xref ref-type="table-fn" rid="t6fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.14</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="t6fns1"><p>&#x002A;<italic>p</italic> &#x003C; 0.05.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>As shown in <xref ref-type="table" rid="T7">Table 7</xref> and <xref ref-type="fig" rid="F4">Figure 4b</xref>, for &#x03B2; band, analysis indicates that the power in the F7 and POZ channels is significantly higher in high sustained attention state compared to low sustained attention state. Correspondingly, &#x03B2; wave activity has been associated with attention and alertness (<xref ref-type="bibr" rid="B26">Rouhinen et al., 2013</xref>).</p>
<table-wrap position="float" id="T7">
<label>TABLE 7</label>
<caption><p>Channels with significant differences in &#x03B2; band power by attention state.</p></caption>
<table cellspacing="5" cellpadding="5" frame="box" rules="all">
<thead>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;">Freq.</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Channel</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Sample size</td>
<td valign="top" align="center" colspan="2" style="color:#ffffff;background-color: #7f8080;">High</td>
<td valign="top" align="center" colspan="2" style="color:#ffffff;background-color: #7f8080;">Low</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">U</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"><italic>p</italic></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Cohen&#x2019;s <italic>d</italic></td>
</tr>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Mean</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">SD</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Mean</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">SD</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left" rowspan="2">&#x03B2;</td>
<td valign="top" align="center">F7</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0068</td>
<td valign="top" align="center">0.0029</td>
<td valign="top" align="center">0.0045</td>
<td valign="top" align="center">0.0004</td>
<td valign="top" align="center">34.0</td>
<td valign="top" align="center">0.027<xref ref-type="table-fn" rid="t7fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.66</td>
</tr>
<tr>
<td valign="top" align="center">POZ</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0070</td>
<td valign="top" align="center">0.0017</td>
<td valign="top" align="center">0.0060</td>
<td valign="top" align="center">0.0049</td>
<td valign="top" align="center">37.5</td>
<td valign="top" align="center">0.046<xref ref-type="table-fn" rid="t7fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.27</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="t7fns1"><p>&#x002A;<italic>p</italic> &#x003C; 0.05.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>As shown in <xref ref-type="table" rid="T8">Table 8</xref> and <xref ref-type="fig" rid="F4">Figure 4c</xref>, for &#x03B3; band, the analysis shows that powers in the AF3, C4, CP2, CZ, F3, F7, FC1, FC2, FC5, P4, and POZ channels are significantly higher in high sustained attention state compared to low sustained attention state. In congruence with prior studies, &#x03B3; wave activity is closely related to higher cognitive functions and information processing (<xref ref-type="bibr" rid="B21">Masuda, 2009</xref>; <xref ref-type="bibr" rid="B26">Rouhinen et al., 2013</xref>), which may be more active during high sustained attention.</p>
<table-wrap position="float" id="T8">
<label>TABLE 8</label>
<caption><p>Channels with significant differences in &#x03B3; band power by attention state.</p></caption>
<table cellspacing="5" cellpadding="5" frame="box" rules="all">
<thead>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;">Freq.</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Channel</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Sample size</td>
<td valign="top" align="center" colspan="2" style="color:#ffffff;background-color: #7f8080;">High</td>
<td valign="top" align="center" colspan="2" style="color:#ffffff;background-color: #7f8080;">Low</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">U</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"><italic>p</italic></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Cohen&#x2019;s <italic>d</italic></td>
</tr>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Mean</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">SD</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Mean</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">SD</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left" rowspan="11">&#x03B3;</td>
<td valign="top" align="center">AF3</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0044</td>
<td valign="top" align="center">0.0011</td>
<td valign="top" align="center">0.0034</td>
<td valign="top" align="center">0.0005</td>
<td valign="top" align="center">33.0</td>
<td valign="top" align="center">0.023<xref ref-type="table-fn" rid="t8fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.17</td>
</tr>
<tr>
<td valign="top" align="center">C4</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0045</td>
<td valign="top" align="center">0.0011</td>
<td valign="top" align="center">0.0032</td>
<td valign="top" align="center">0.0006</td>
<td valign="top" align="center">36.5</td>
<td valign="top" align="center">0.040<xref ref-type="table-fn" rid="t8fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.47</td>
</tr>
<tr>
<td valign="top" align="center">CP2</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0046</td>
<td valign="top" align="center">0.0009</td>
<td valign="top" align="center">0.0038</td>
<td valign="top" align="center">0.0007</td>
<td valign="top" align="center">37.5</td>
<td valign="top" align="center">0.045<xref ref-type="table-fn" rid="t8fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.99</td>
</tr>
<tr>
<td valign="top" align="center">CZ</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0044</td>
<td valign="top" align="center">0.0009</td>
<td valign="top" align="center">0.0037</td>
<td valign="top" align="center">0.0006</td>
<td valign="top" align="center">36.0</td>
<td valign="top" align="center">0.037<xref ref-type="table-fn" rid="t8fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.92</td>
</tr>
<tr>
<td valign="top" align="center">F3</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0043</td>
<td valign="top" align="center">0.0010</td>
<td valign="top" align="center">0.0035</td>
<td valign="top" align="center">0.0005</td>
<td valign="top" align="center">36.0</td>
<td valign="top" align="center">0.037<xref ref-type="table-fn" rid="t8fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.01</td>
</tr>
<tr>
<td valign="top" align="center">F7</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0056</td>
<td valign="top" align="center">0.0028</td>
<td valign="top" align="center">0.0031</td>
<td valign="top" align="center">0.0006</td>
<td valign="top" align="center">26.5</td>
<td valign="top" align="center">0.007<xref ref-type="table-fn" rid="t8fns2">&#x002A;&#x002A;</xref></td>
<td valign="top" align="center">1.23</td>
</tr>
<tr>
<td valign="top" align="center">FC1</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0044</td>
<td valign="top" align="center">0.0009</td>
<td valign="top" align="center">0.0036</td>
<td valign="top" align="center">0.0006</td>
<td valign="top" align="center">35.0</td>
<td valign="top" align="center">0.031<xref ref-type="table-fn" rid="t8fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.05</td>
</tr>
<tr>
<td valign="top" align="center">FC2</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0044</td>
<td valign="top" align="center">0.0010</td>
<td valign="top" align="center">0.0036</td>
<td valign="top" align="center">0.0006</td>
<td valign="top" align="center">35.5</td>
<td valign="top" align="center">0.017<xref ref-type="table-fn" rid="t8fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.97</td>
</tr>
<tr>
<td valign="top" align="center">FC5</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0045</td>
<td valign="top" align="center">0.0014</td>
<td valign="top" align="center">0.0034</td>
<td valign="top" align="center">0.0007</td>
<td valign="top" align="center">36.5</td>
<td valign="top" align="center">0.040<xref ref-type="table-fn" rid="t8fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.99</td>
</tr>
<tr>
<td valign="top" align="center">P4</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0052</td>
<td valign="top" align="center">0.0011</td>
<td valign="top" align="center">0.0043</td>
<td valign="top" align="center">0.0012</td>
<td valign="top" align="center">36.5</td>
<td valign="top" align="center">0.020<xref ref-type="table-fn" rid="t8fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.78</td>
</tr>
<tr>
<td valign="top" align="center">POZ</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.0053</td>
<td valign="top" align="center">0.0016</td>
<td valign="top" align="center">0.0040</td>
<td valign="top" align="center">0.0008</td>
<td valign="top" align="center">34.0</td>
<td valign="top" align="center">0.027<xref ref-type="table-fn" rid="t8fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.03</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="t8fns1"><p>&#x002A;<italic>p</italic> &#x003C; 0.05,</p></fn>
<fn id="t8fns2"><p>&#x002A;&#x002A;<italic>p</italic> &#x003C; 0.01.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>For <inline-formula><mml:math id="INEQ106"><mml:mfrac><mml:mi mathvariant="normal">&#x03B2;</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi><mml:mo>+</mml:mo><mml:mi mathvariant="normal">&#x03B8;</mml:mi></mml:mrow></mml:mfrac></mml:math></inline-formula> ratio, the analysis indicates that this ratio is significantly higher in high sustained attention state compared to low sustained attention state, as shown in <xref ref-type="table" rid="T9">Table 9</xref> and <xref ref-type="fig" rid="F4">Figure 4d</xref>. This is consistent with research, that suggested the <inline-formula><mml:math id="INEQ107"><mml:mfrac><mml:mi mathvariant="normal">&#x03B2;</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi><mml:mo>+</mml:mo><mml:mi mathvariant="normal">&#x03B8;</mml:mi></mml:mrow></mml:mfrac></mml:math></inline-formula> ratio reflects changes in attention and alertness (<xref ref-type="bibr" rid="B33">Wang et al., 2011</xref>).</p>
<table-wrap position="float" id="T9">
<label>TABLE 9</label>
<caption><p>Channels with significant differences in <inline-formula><mml:math id="INEQ104"><mml:mfrac><mml:mi mathvariant="normal">&#x03B2;</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi><mml:mo>+</mml:mo><mml:mi mathvariant="normal">&#x03B8;</mml:mi></mml:mrow></mml:mfrac></mml:math></inline-formula> ratio by attention state.</p></caption>
<table cellspacing="5" cellpadding="5" frame="box" rules="all">
<thead>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;">Freq.</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Channel</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Sample size</td>
<td valign="top" align="center" colspan="2" style="color:#ffffff;background-color: #7f8080;">High</td>
<td valign="top" align="center" colspan="2" style="color:#ffffff;background-color: #7f8080;">Low</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">U</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"><italic>p</italic></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Cohen&#x2019;s <italic>d</italic></td>
</tr>
<tr>
<td valign="top" align="left" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Mean</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">SD</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">Mean</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;">SD</td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
<td valign="top" align="center" style="color:#ffffff;background-color: #7f8080;"></td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left" rowspan="19"><inline-formula><mml:math id="INEQ105"><mml:mfrac><mml:mi mathvariant="normal">&#x03B2;</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi><mml:mo>+</mml:mo><mml:mi mathvariant="normal">&#x03B8;</mml:mi></mml:mrow></mml:mfrac></mml:math></inline-formula></td>
<td valign="top" align="center">AF3</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.2943</td>
<td valign="top" align="center">0.0260</td>
<td valign="top" align="center">0.2675</td>
<td valign="top" align="center">0.0315</td>
<td valign="top" align="center">32.0</td>
<td valign="top" align="center">0.021<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.93</td>
</tr>
<tr>
<td valign="top" align="center">AF4</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.3206</td>
<td valign="top" align="center">0.0493</td>
<td valign="top" align="center">0.2760</td>
<td valign="top" align="center">0.0259</td>
<td valign="top" align="center">28.0</td>
<td valign="top" align="center">0.010<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.13</td>
</tr>
<tr>
<td valign="top" align="center">C3</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.3018</td>
<td valign="top" align="center">0.0190</td>
<td valign="top" align="center">0.2712</td>
<td valign="top" align="center">0.0377</td>
<td valign="top" align="center">27.0</td>
<td valign="top" align="center">0.008<xref ref-type="table-fn" rid="t9fns2">&#x002A;&#x002A;</xref></td>
<td valign="top" align="center">1.03</td>
</tr>
<tr>
<td valign="top" align="center">C4</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.3021</td>
<td valign="top" align="center">0.0291</td>
<td valign="top" align="center">0.2665</td>
<td valign="top" align="center">0.0298</td>
<td valign="top" align="center">28.0</td>
<td valign="top" align="center">0.010<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.21</td>
</tr>
<tr>
<td valign="top" align="center">CP1</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.2995</td>
<td valign="top" align="center">0.0158</td>
<td valign="top" align="center">0.2687</td>
<td valign="top" align="center">0.0398</td>
<td valign="top" align="center">36.0</td>
<td valign="top" align="center">0.039<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.02</td>
</tr>
<tr>
<td valign="top" align="center">CP2</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.2958</td>
<td valign="top" align="center">0.0186</td>
<td valign="top" align="center">0.2634</td>
<td valign="top" align="center">0.0336</td>
<td valign="top" align="center">31.0</td>
<td valign="top" align="center">0.017<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.19</td>
</tr>
<tr>
<td valign="top" align="center">CP6</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.3300</td>
<td valign="top" align="center">0.0315</td>
<td valign="top" align="center">0.2943</td>
<td valign="top" align="center">0.0321</td>
<td valign="top" align="center">31.0</td>
<td valign="top" align="center">0.017<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.12</td>
</tr>
<tr>
<td valign="top" align="center">CZ</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.2814</td>
<td valign="top" align="center">0.0214</td>
<td valign="top" align="center">0.2565</td>
<td valign="top" align="center">0.0279</td>
<td valign="top" align="center">37.0</td>
<td valign="top" align="center">0.045<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.99</td>
</tr>
<tr>
<td valign="top" align="center">F3</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.2878</td>
<td valign="top" align="center">0.0214</td>
<td valign="top" align="center">0.2645</td>
<td valign="top" align="center">0.0323</td>
<td valign="top" align="center">28.0</td>
<td valign="top" align="center">0.010<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.85</td>
</tr>
<tr>
<td valign="top" align="center">F4</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.2973</td>
<td valign="top" align="center">0.0455</td>
<td valign="top" align="center">0.2570</td>
<td valign="top" align="center">0.0266</td>
<td valign="top" align="center">32.0</td>
<td valign="top" align="center">0.020<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.08</td>
</tr>
<tr>
<td valign="top" align="center">FC1</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.2792</td>
<td valign="top" align="center">0.0188</td>
<td valign="top" align="center">0.2573</td>
<td valign="top" align="center">0.0257</td>
<td valign="top" align="center">36.0</td>
<td valign="top" align="center">0.039<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.97</td>
</tr>
<tr>
<td valign="top" align="center">FC2</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.2810</td>
<td valign="top" align="center">0.0296</td>
<td valign="top" align="center">0.2537</td>
<td valign="top" align="center">0.0278</td>
<td valign="top" align="center">37.0</td>
<td valign="top" align="center">0.045<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.95</td>
</tr>
<tr>
<td valign="top" align="center">FC5</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.3039</td>
<td valign="top" align="center">0.0391</td>
<td valign="top" align="center">0.2738</td>
<td valign="top" align="center">0.0499</td>
<td valign="top" align="center">35.0</td>
<td valign="top" align="center">0.033<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.67</td>
</tr>
<tr>
<td valign="top" align="center">FPZ</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.3424</td>
<td valign="top" align="center">0.0921</td>
<td valign="top" align="center">0.2704</td>
<td valign="top" align="center">0.0250</td>
<td valign="top" align="center">25.0</td>
<td valign="top" align="center">0.003<xref ref-type="table-fn" rid="t9fns2">&#x002A;&#x002A;</xref></td>
<td valign="top" align="center">1.07</td>
</tr>
<tr>
<td valign="top" align="center">FZ</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.2755</td>
<td valign="top" align="center">0.0251</td>
<td valign="top" align="center">0.2943</td>
<td valign="top" align="center">0.0321</td>
<td valign="top" align="center">37.0</td>
<td valign="top" align="center">0.043<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.65</td>
</tr>
<tr>
<td valign="top" align="center">P4</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.3249</td>
<td valign="top" align="center">0.0271</td>
<td valign="top" align="center">0.2923</td>
<td valign="top" align="center">0.0482</td>
<td valign="top" align="center">35.0</td>
<td valign="top" align="center">0.033<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.83</td>
</tr>
<tr>
<td valign="top" align="center">P8</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.3760</td>
<td valign="top" align="center">0.0429</td>
<td valign="top" align="center">0.3316</td>
<td valign="top" align="center">0.0400</td>
<td valign="top" align="center">29.0</td>
<td valign="top" align="center">0.012<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">1.07</td>
</tr>
<tr>
<td valign="top" align="center">POZ</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.3126</td>
<td valign="top" align="center">0.0476</td>
<td valign="top" align="center">0.2581</td>
<td valign="top" align="center">0.0344</td>
<td valign="top" align="center">23.0</td>
<td valign="top" align="center">0.004<xref ref-type="table-fn" rid="t9fns2">&#x002A;&#x002A;</xref></td>
<td valign="top" align="center">1.31</td>
</tr>
<tr>
<td valign="top" align="center">PO4</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">0.3255</td>
<td valign="top" align="center">0.0171</td>
<td valign="top" align="center">0.2996</td>
<td valign="top" align="center">0.0400</td>
<td valign="top" align="center">36.0</td>
<td valign="top" align="center">0.039<xref ref-type="table-fn" rid="t9fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.84</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="t9fns1"><p>&#x002A;<italic>p</italic> &#x003C; 0.05,</p></fn>
<fn id="t9fns2"><p>&#x002A;&#x002A;<italic>p</italic> &#x003C; 0.01.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>EEG results from this study indicate significant differences between high and low sustained attention conditions in the power of &#x03B1;, &#x03B2;, &#x03B3; bands, as well as <inline-formula><mml:math id="INEQ108"><mml:mfrac><mml:mi mathvariant="normal">&#x03B2;</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi><mml:mo>+</mml:mo><mml:mi mathvariant="normal">&#x03B8;</mml:mi></mml:mrow></mml:mfrac></mml:math></inline-formula> ratio. In high sustained attention state, EEG power in &#x03B1; band decreases in the C3 channel, &#x03B2; band power increases in the F7 and POZ channels, and both &#x03B3; band power and <inline-formula><mml:math id="INEQ109"><mml:mfrac><mml:mi mathvariant="normal">&#x03B2;</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi><mml:mo>+</mml:mo><mml:mi mathvariant="normal">&#x03B8;</mml:mi></mml:mrow></mml:mfrac></mml:math></inline-formula> increase in the parietal and frontal regions. These findings corroborate previous studies that found &#x03B1; wave power decrease, as well as &#x03B2; and &#x03B3; wave power increase during high sustained attention states. This might be due to the increase in cognitive resources required for high sustained attention tasks (<xref ref-type="bibr" rid="B1">Ba&#x015F;ar et al., 2001</xref>; <xref ref-type="bibr" rid="B16">Klimesch, 1999</xref>).</p>
</sec>
<sec id="S3.SS5">
<title>3.5 PPG data analysis</title>
<p>We also examine heart rate variability (HRV) characteristics in participants under high and low sustained attention states, calculated from PPG data. Considering the sample size in general PPG data analysis, we divided each task into two halves, and HRV features were extracted from each half, resulting in 48 samples. After normality testing, HRV indices VLF[%], LF[%], HF[%], LF/HF, LF[n.u.], and HF[n.u.] follow a normal distribution, while MEANRR, MEDIANRR, MEANHR, SDNN, RMSSD, NN50, pNN50, VLF[abs], LF[abs], and HF[abs] do not.</p>
<p>Indices that adhere to normality are analyzed with the <italic>t</italic>-test. Among these, HF[%] shows a significant decrease in high sustained attention state (<italic>M</italic> = 35.94, <italic>SD</italic> = 10.03) compared to low sustained attention state (<italic>M</italic> = 42.24, <italic>SD</italic> = 10.80), <italic>t</italic>(46) = 2.09, <italic>p</italic> = 0.042, Cohen&#x2019;s <italic>d</italic> = 0.60. Indices that do not adhere to normality are analyzed with the Mann-Whitney U test. Among them, VLF[abs] displays a significant increase in high sustained attention state (<italic>Md</italic> = 868.66) compared to low sustained attention state (<italic>Md</italic> = 500.26), <italic>U</italic> = 176.50, <italic>p</italic> = 0.021, Cohen&#x2019;s <italic>d</italic> = 0.82. These findings align with previous research that found high sustained attention tasks to require more cognitive resources, which leads to changes in autonomic nervous system regulation: reduced parasympathetic activity, associated with decrease in HF[%], and increased sympathetic activity, associated with increase in VLF[abs] (<xref ref-type="bibr" rid="B17">Krygier et al., 2013</xref>; <xref ref-type="bibr" rid="B31">Thayer et al., 2012</xref>).</p>
</sec>
</sec>
<sec id="S4" sec-type="discussion">
<title>4 Discussion</title>
<p>As elaborated in previous sections, literature on emotions&#x2019; effect on attention has yielded mixed results. This study takes a step toward resolving existing contradictions by improving upon methodology: using more ecologically valid VR videos to induce emotions, and measuring sustained attention directly with EEG and PPG. Additionally, the study enhances the analysis by taking gender differences into consideration and using sustained attention as a factor to account for the quality of attention. Results show that for females, sustained attention levels (i.e., quality of attention) are significantly higher during high-arousal positive emotions compared to low-arousal positive emotions, while for males, sustained attention levels during high-arousal negative emotions are significantly higher than during high-arousal positive emotions. In particular, the findings of this study could be applied to educational settings to enhance learning outcomes. For example, understanding the impact of emotions on sustained attention could inform instructional design, suggesting that educators might tailor learning environments to evoke positive high-arousal states in students, potentially improving their engagement and performance. Similarly, in professional training and workplace settings, creating emotionally positive and stimulating environments could enhance employees&#x2019; focus and productivity.</p>
<p>Moreover, this study clarifies the relationship between flow experience and sustained attention, showing that there is no significant association under the context of timed AX-CPT tasks. This corresponds with previous research testing flow experience with timed tasks, such as <xref ref-type="bibr" rid="B32">Ullen et al. (2012)</xref>, but points toward a possible link between flow experience and ecological validity of the experiment task. At the same time, our analysis of EEG and PPG data provides insight into how heightened sustained attention is directly reflected in brain activity. Results from EEG data enable looking specifically at frequency bands related to attention and sustained attention, while raising a concern that may be related to the mixed results in previous research. That is, in subsequent studies, it may be worth considering using similar direct measurements to further distinguish between responses resulting from high sustained attention versus from emotional arousal.</p>
<p>This study uses a relatively small sample size. Future studies in related directions should consider using larger samples, while taking gender differences and quality of sustained attention into account when analyzing attention task performance. Potential future research directions can include exploring the application of these results to other types of attention tasks with more ecological validity, such as reading, writing, gaming, as well as untimed tasks. Also, researchers could investigate the adaptive contexts that brought forth these gender differences in emotion induction responses. In doing so, the effects of emotion on attention/engagement and flow experience in different contexts could be further explored, to point toward a more systematic, unified theory that could be applied to improve performance in complex real-world contexts. This broader application could guide the development of more effective strategies in education, training, and therapy, ultimately enhancing individual performance and well-being.</p>
</sec>
</body>
<back>
<sec id="S5" sec-type="data-availability">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="S6" sec-type="ethics-statement">
<title>Ethics statement</title>
<p>The studies involving humans were approved by the Institutional Review Board of Beijing Normal University (No. BNU202212080135). The studies were conducted in accordance with the local legislation and institutional requirements. The participants provided their written informed consent to participate in this study.</p>
</sec>
<sec id="S7" sec-type="author-contributions">
<title>Author contributions</title>
<p>YS: Conceptualization, Formal analysis, Funding acquisition, Writing &#x2013; original draft. HZ: Visualization, Writing &#x2013; original draft, Methodology. YL: Methodology, Software, Writing &#x2013; original draft. XT: Conceptualization, Funding acquisition, Supervision, Writing &#x2013; review and editing.</p>
</sec>
<sec id="S8" sec-type="funding-information">
<title>Funding</title>
<p>The author(s) declare financial support was received for the research, authorship, and/or publication of the article. This work was supported by the National Natural Science Foundation of China under Grants 62307003 and 62207002.</p>
</sec>
<sec id="S9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="S10" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ba&#x015F;ar</surname> <given-names>E.</given-names></name> <name><surname>Ba&#x015F;ar-Eroglu</surname> <given-names>C.</given-names></name> <name><surname>Karaka&#x015F;</surname> <given-names>S.</given-names></name> <name><surname>Sch&#x00FC;rmann</surname> <given-names>M.</given-names></name></person-group> (<year>2001</year>). <article-title>Gamma, alpha, delta, and theta oscillations govern cognitive processes.</article-title> <source><italic>Int. J. Psychophysiol.</italic></source> <volume>39</volume> <fpage>241</fpage>&#x2013;<lpage>248</lpage>.</citation></ref>
<ref id="B2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Baykal</surname> <given-names>B.</given-names></name></person-group> (<year>2022</year>). <source><italic>Temporal effects of top-down emotion regulation strategies on affect, working memory load, and attentional deployment.</italic></source> <publisher-loc>Houston, TX</publisher-loc>: <publisher-name>Faculty of The University of Houston-Clear Lake</publisher-name>.</citation></ref>
<ref id="B3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bradley</surname> <given-names>M. M.</given-names></name> <name><surname>Lang</surname> <given-names>P. J.</given-names></name></person-group> (<year>1994</year>). <article-title>Measuring emotion: The self-assessment manikin and the semantic differential.</article-title> <source><italic>J. Behav. Ther. Exp. Psychiatry</italic></source> <volume>25</volume> <fpage>49</fpage>&#x2013;<lpage>59</lpage>.</citation></ref>
<ref id="B4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bradley</surname> <given-names>M. M.</given-names></name> <name><surname>Codispoti</surname> <given-names>M.</given-names></name> <name><surname>Sabatinelli</surname> <given-names>D.</given-names></name> <name><surname>Lang</surname> <given-names>P. J.</given-names></name></person-group> (<year>2001</year>). <article-title>Emotion and motivation II: Sex differences in picture processing.</article-title> <source><italic>Emotion</italic></source> <volume>1</volume> <fpage>300</fpage>&#x2013;<lpage>319</lpage>.</citation></ref>
<ref id="B5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Braver</surname> <given-names>T. S.</given-names></name> <name><surname>Gray</surname> <given-names>J. R.</given-names></name> <name><surname>Burgess</surname> <given-names>G. C.</given-names></name></person-group> (<year>2007</year>). &#x201C;<article-title>Explaining the many varieties of working memory variation: Dual mechanisms of cognitive control</article-title>,&#x201D; in <source><italic>Variation in working memory</italic></source>, <role>eds</role> <person-group person-group-type="editor"><name><surname>Conway</surname> <given-names>A. R. A.</given-names></name> <name><surname>Jarrold</surname> <given-names>C.</given-names></name> <name><surname>Kane</surname> <given-names>M. J.</given-names></name> <name><surname>Miyake</surname> <given-names>A.</given-names></name> <name><surname>Towse</surname> <given-names>J. N.</given-names></name></person-group> (<publisher-loc>Oxford</publisher-loc>: <publisher-name>Oxford University Press</publisher-name>), <fpage>76</fpage>&#x2013;<lpage>106</lpage>.</citation></ref>
<ref id="B6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Brosch</surname> <given-names>T.</given-names></name> <name><surname>Scherer</surname> <given-names>K. R.</given-names></name> <name><surname>Grandjean</surname> <given-names>D.</given-names></name> <name><surname>Sander</surname> <given-names>D.</given-names></name></person-group> (<year>2013</year>). <article-title>The impact of emotion on perception, attention, memory, and decision-making.</article-title> <source><italic>Swiss Med. Wkly.</italic></source> <volume>143</volume>:<issue>w13786</issue>.</citation></ref>
<ref id="B7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Compton</surname> <given-names>R. J.</given-names></name></person-group> (<year>2003</year>). <article-title>The interface between emotion and attention: A review of evidence from psychology and neuroscience.</article-title> <source><italic>Behav. Cogn. Neurosci. Rev.</italic></source> <volume>2</volume> <fpage>115</fpage>&#x2013;<lpage>129</lpage>. <pub-id pub-id-type="doi">10.1177/1534582303255278</pub-id> <pub-id pub-id-type="pmid">13678519</pub-id></citation></ref>
<ref id="B8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Csikszentmihalyi</surname> <given-names>M.</given-names></name></person-group> (<year>1975</year>). <source><italic>Beyond boredom and anxiety</italic></source>, <edition>1st Edn</edition>. <publisher-loc>Hoboken, NJ</publisher-loc>: <publisher-name>Jossey-Bass Publishers</publisher-name>.</citation></ref>
<ref id="B9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Delorme</surname> <given-names>A.</given-names></name> <name><surname>Makeig</surname> <given-names>S.</given-names></name></person-group> (<year>2004</year>). <article-title>EEGLAB: An open-source toolbox for analysis of single-trial EEG dynamics.</article-title> <source><italic>J. Neurosci. Methods</italic></source> <volume>134</volume> <fpage>9</fpage>&#x2013;<lpage>21</lpage>.</citation></ref>
<ref id="B10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Engeser</surname> <given-names>S.</given-names></name> <name><surname>Rheinberg</surname> <given-names>F.</given-names></name></person-group> (<year>2008</year>). <article-title>Flow, performance and moderators of challenge-skill balance.</article-title> <source><italic>Motiv. Emot.</italic></source> <volume>32</volume> <fpage>158</fpage>&#x2013;<lpage>172</lpage>.</citation></ref>
<ref id="B11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gasper</surname> <given-names>K.</given-names></name> <name><surname>Clore</surname> <given-names>G. L.</given-names></name></person-group> (<year>2002</year>). <article-title>Attending to the big picture: Mood and global versus local processing of visual information.</article-title> <source><italic>Psychol. Sci.</italic></source> <volume>13</volume> <fpage>34</fpage>&#x2013;<lpage>40</lpage>. <pub-id pub-id-type="doi">10.1111/1467-9280.00406</pub-id> <pub-id pub-id-type="pmid">11892776</pub-id></citation></ref>
<ref id="B12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gramfort</surname> <given-names>A.</given-names></name> <name><surname>Luessi</surname> <given-names>M.</given-names></name> <name><surname>Larson</surname> <given-names>E.</given-names></name> <name><surname>Engemann</surname> <given-names>D. A.</given-names></name> <name><surname>Strohmeier</surname> <given-names>D.</given-names></name> <name><surname>Brodbeck</surname> <given-names>C.</given-names></name><etal/></person-group> (<year>2013</year>). <article-title>MEG and EEG data analysis with MNE-Python.</article-title> <source><italic>Front. Neurosci.</italic></source> <volume>7</volume>:<issue>267</issue>. <pub-id pub-id-type="doi">10.3389/fnins.2013.00267</pub-id> <pub-id pub-id-type="pmid">24431986</pub-id></citation></ref>
<ref id="B13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Harris</surname> <given-names>D. J.</given-names></name> <name><surname>Allen</surname> <given-names>K. L.</given-names></name> <name><surname>Vine</surname> <given-names>S. J.</given-names></name> <name><surname>Wilson</surname> <given-names>M. R.</given-names></name></person-group> (<year>2021</year>). <article-title>A systematic review and meta-analysis of the relationship between flow states and performance.</article-title> <source><italic>Int. Rev. Sport Exerc. Psychol.</italic></source> <volume>16</volume> <fpage>693</fpage>&#x2013;<lpage>721</lpage>.</citation></ref>
<ref id="B14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Harris</surname> <given-names>F. J.</given-names></name></person-group> (<year>1978</year>). <article-title>On the use of windows for harmonic analysis with the discrete Fourier transform.</article-title> <source><italic>Proc. IEEE</italic></source> <volume>66</volume> <fpage>51</fpage>&#x2013;<lpage>83</lpage>.</citation></ref>
<ref id="B15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jefferies</surname> <given-names>L. N.</given-names></name> <name><surname>Smilek</surname> <given-names>D.</given-names></name> <name><surname>Eich</surname> <given-names>E.</given-names></name> <name><surname>Enns</surname> <given-names>J. T.</given-names></name></person-group> (<year>2008</year>). <article-title>Emotional valence and arousal interact in attentional control.</article-title> <source><italic>Psychol. Sci.</italic></source> <volume>19</volume> <fpage>290</fpage>&#x2013;<lpage>295</lpage>.</citation></ref>
<ref id="B16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Klimesch</surname> <given-names>W.</given-names></name></person-group> (<year>1999</year>). <article-title>EEG alpha and theta oscillations reflect cognitive and memory performance: A review and analysis.</article-title> <source><italic>Brain Res. Rev.</italic></source> <volume>29</volume> <fpage>169</fpage>&#x2013;<lpage>195</lpage>.</citation></ref>
<ref id="B17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Krygier</surname> <given-names>J. R.</given-names></name> <name><surname>Heathers</surname> <given-names>J. A. J.</given-names></name> <name><surname>Shahrestani</surname> <given-names>S.</given-names></name> <name><surname>Abbott</surname> <given-names>M.</given-names></name> <name><surname>Gross</surname> <given-names>J. J.</given-names></name> <name><surname>Kemp</surname> <given-names>A. H.</given-names></name></person-group> (<year>2013</year>). <article-title>Mindfulness meditation, well-being, and heart rate variability: A preliminary investigation into the impact of intensive Vipassana meditation.</article-title> <source><italic>Int. J. Psychophysiol.</italic></source> <volume>89</volume> <fpage>305</fpage>&#x2013;<lpage>313</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijpsycho.2013.06.017</pub-id> <pub-id pub-id-type="pmid">23797150</pub-id></citation></ref>
<ref id="B18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kumar</surname> <given-names>S. M.</given-names></name> <name><surname>Vaishali</surname> <given-names>K.</given-names></name> <name><surname>Maiya</surname> <given-names>G. A.</given-names></name> <name><surname>Shivashankar</surname> <given-names>K. N.</given-names></name> <name><surname>Shashikiran</surname> <given-names>U.</given-names></name></person-group> (<year>2023</year>). <article-title>Analysis of time-domain indices, frequency domain measures of heart rate variability derived from ECG waveform and pulse-wave-related HRV among overweight individuals: An observational study.</article-title> <source><italic>F1000Research</italic></source> <volume>12</volume>:<issue>1229</issue>. <pub-id pub-id-type="doi">10.12688/f1000research.139283.1</pub-id> <pub-id pub-id-type="pmid">37799491</pub-id></citation></ref>
<ref id="B19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>B. J.</given-names></name> <name><surname>Bailenson</surname> <given-names>J. N.</given-names></name> <name><surname>Pines</surname> <given-names>A.</given-names></name> <name><surname>Greenleaf</surname> <given-names>W. J.</given-names></name> <name><surname>Williams</surname> <given-names>L. M.</given-names></name></person-group> (<year>2017</year>). <article-title>A public database of immersive VR videos with corresponding ratings of arousal, valence, and correlations between head movements and self report measures.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>8</volume>:<issue>2116</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2017.02116</pub-id> <pub-id pub-id-type="pmid">29259571</pub-id></citation></ref>
<ref id="B20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Marty-Dugas</surname> <given-names>J.</given-names></name> <name><surname>Smilek</surname> <given-names>D.</given-names></name></person-group> (<year>2019</year>). <article-title>Deep, effortless concentration: Re-examining the flow concept and exploring relations with inattention, absorption, and personality.</article-title> <source><italic>Psychol. Res.</italic></source> <volume>83</volume> <fpage>1760</fpage>&#x2013;<lpage>1777</lpage>. <pub-id pub-id-type="doi">10.1007/s00426-018-1031-6</pub-id> <pub-id pub-id-type="pmid">29948186</pub-id></citation></ref>
<ref id="B21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Masuda</surname> <given-names>N.</given-names></name></person-group> (<year>2009</year>). <article-title>Selective population rate coding: A possible computational role of gamma oscillations in selective attention.</article-title> <source><italic>Neural Comput.</italic></source> <volume>21</volume> <fpage>3335</fpage>&#x2013;<lpage>3362</lpage>. <pub-id pub-id-type="doi">10.1162/neco.2009.09-08-857</pub-id> <pub-id pub-id-type="pmid">19686062</pub-id></citation></ref>
<ref id="B22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mather</surname> <given-names>M.</given-names></name> <name><surname>Sutherland</surname> <given-names>M. R.</given-names></name></person-group> (<year>2011</year>). <article-title>Arousal-biased competition in perception and memory.</article-title> <source><italic>Perspect. Psychol. Sci.</italic></source> <volume>6</volume> <fpage>114</fpage>&#x2013;<lpage>133</lpage>.</citation></ref>
<ref id="B23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mitchell</surname> <given-names>J.</given-names></name></person-group> (<year>2022</year>). <article-title>Emotion and attention.</article-title> <source><italic>Philos. Stud.</italic></source> <volume>180</volume> <fpage>1</fpage>&#x2013;<lpage>27</lpage>.</citation></ref>
<ref id="B24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pekrun</surname> <given-names>R.</given-names></name> <name><surname>Goetz</surname> <given-names>T.</given-names></name> <name><surname>Titz</surname> <given-names>W.</given-names></name> <name><surname>Perry</surname> <given-names>R. P.</given-names></name></person-group> (<year>2002</year>). <article-title>Academic emotions in students&#x2019; self-regulated learning and achievement: A program of qualitative and quantitative research.</article-title> <source><italic>Educ. Psychol.</italic></source> <volume>37</volume> <fpage>91</fpage>&#x2013;<lpage>105</lpage>.</citation></ref>
<ref id="B25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Phelps</surname> <given-names>E. A.</given-names></name> <name><surname>Ling</surname> <given-names>S.</given-names></name> <name><surname>Carrasco</surname> <given-names>M.</given-names></name></person-group> (<year>2006</year>). <article-title>Emotion facilitates perception and potentiates the perceptual benefits of attention.</article-title> <source><italic>Psychol. Sci.</italic></source> <volume>17</volume> <fpage>292</fpage>&#x2013;<lpage>299</lpage>.</citation></ref>
<ref id="B26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rouhinen</surname> <given-names>S.</given-names></name> <name><surname>Panula</surname> <given-names>J.</given-names></name> <name><surname>Palva</surname> <given-names>J. M.</given-names></name> <name><surname>Palva</surname> <given-names>S.</given-names></name></person-group> (<year>2013</year>). <article-title>Load dependence of &#x03B2; and &#x03B3; oscillations predicts individual capacity of visual attention.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>33</volume> <fpage>19023</fpage>&#x2013;<lpage>19033</lpage>.</citation></ref>
<ref id="B27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Russell</surname> <given-names>J. A.</given-names></name></person-group> (<year>1980</year>). <article-title>A circumplex model of affect.</article-title> <source><italic>J. Pers. Soc. Psychol.</italic></source> <volume>39</volume> <fpage>1161</fpage>&#x2013;<lpage>1178</lpage>.</citation></ref>
<ref id="B28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schiefele</surname> <given-names>U.</given-names></name> <name><surname>Raabe</surname> <given-names>A.</given-names></name></person-group> (<year>2011</year>). <article-title>Skills-demands compatibility as a determinant of flow experience in an inductive reasoning task.</article-title> <source><italic>Psychol. Rep.</italic></source> <volume>109</volume> <fpage>428</fpage>&#x2013;<lpage>444</lpage>. <pub-id pub-id-type="doi">10.2466/04.22.PR0.109.5.428-444</pub-id> <pub-id pub-id-type="pmid">22238850</pub-id></citation></ref>
<ref id="B29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Solomon</surname> <given-names>J. O. M.</given-names></name></person-group> (<year>1991</year>). <source><italic>PSD computations using Welch&#x2019;s method. [Power spectral density (PSD)] (SAND-91-1533).</italic></source> <publisher-loc>Albuquerque, NM</publisher-loc>: <publisher-name>Sandia National Lab</publisher-name>.</citation></ref>
<ref id="B30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Swann</surname> <given-names>C.</given-names></name> <name><surname>Keegan</surname> <given-names>R. J.</given-names></name> <name><surname>Piggott</surname> <given-names>D.</given-names></name> <name><surname>Crust</surname> <given-names>L.</given-names></name></person-group> (<year>2012</year>). <article-title>A systematic review of the experience, occurrence, and controllability of flow states in elite sport.</article-title> <source><italic>Psychol. Sport Exerc.</italic></source> <volume>13</volume> <fpage>807</fpage>&#x2013;<lpage>819</lpage>.</citation></ref>
<ref id="B31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Thayer</surname> <given-names>J. F.</given-names></name> <name><surname>Ahs</surname> <given-names>F.</given-names></name> <name><surname>Fredrikson</surname> <given-names>M.</given-names></name> <name><surname>Sollers</surname> <given-names>J. J.</given-names></name> <name><surname>Wager</surname> <given-names>T. D.</given-names></name></person-group> (<year>2012</year>). <article-title>A meta-analysis of heart rate variability and neuroimaging studies: Implications for heart rate variability as a marker of stress and health.</article-title> <source><italic>Neurosci. Biobehav. Rev.</italic></source> <volume>36</volume> <fpage>747</fpage>&#x2013;<lpage>756</lpage>. <pub-id pub-id-type="doi">10.1016/j.neubiorev.2011.11.009</pub-id> <pub-id pub-id-type="pmid">22178086</pub-id></citation></ref>
<ref id="B32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ullen</surname> <given-names>F.</given-names></name> <name><surname>de Manzano</surname> <given-names>O.</given-names></name> <name><surname>Almeida</surname> <given-names>R.</given-names></name> <name><surname>Magnusson</surname> <given-names>P. K. E.</given-names></name> <name><surname>Pedersen</surname> <given-names>N. L.</given-names></name> <name><surname>Nakamura</surname> <given-names>J.</given-names></name><etal/></person-group> (<year>2012</year>). <article-title>Proneness for psychological flow in everyday life: Associations with personality and intelligence.</article-title> <source><italic>Pers. Individ. Differ.</italic></source> <volume>52</volume> <fpage>167</fpage>&#x2013;<lpage>172</lpage>.</citation></ref>
<ref id="B33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>X.-W.</given-names></name> <name><surname>Nie</surname> <given-names>D.</given-names></name> <name><surname>Lu</surname> <given-names>B.-L.</given-names></name></person-group> (<year>2011</year>). &#x201C;<article-title>EEG-based emotion recognition using frequency domain features and support vector machines</article-title>,&#x201D; in <source><italic>Neural information processing</italic></source>, <volume>Vol. 7062</volume> <role>eds</role> <person-group person-group-type="editor"><name><surname>Lu</surname> <given-names>B.-L.</given-names></name> <name><surname>Zhang</surname> <given-names>L.</given-names></name> <name><surname>Kwok</surname> <given-names>J.</given-names></name></person-group> (<publisher-loc>Berlin Heidelberg</publisher-loc>: <publisher-name>Springer</publisher-name>), <fpage>734</fpage>&#x2013;<lpage>743</lpage>.</citation></ref>
<ref id="B34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wass</surname> <given-names>S. V.</given-names></name> <name><surname>Smith</surname> <given-names>C. G.</given-names></name> <name><surname>Stubbs</surname> <given-names>L.</given-names></name> <name><surname>Clackson</surname> <given-names>K.</given-names></name> <name><surname>Mirza</surname> <given-names>F. U.</given-names></name></person-group> (<year>2021</year>). <article-title>Physiological stress, sustained attention, emotion regulation, and cognitive engagement in 12-month-old infants from urban environments.</article-title> <source><italic>Dev. Psychol.</italic></source> <volume>57</volume> <fpage>1179</fpage>&#x2013;<lpage>1194</lpage>. <pub-id pub-id-type="doi">10.1037/dev0001200</pub-id> <pub-id pub-id-type="pmid">34591564</pub-id></citation></ref>
<ref id="B35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Westphal</surname> <given-names>A.</given-names></name> <name><surname>Kretschmann</surname> <given-names>J.</given-names></name> <name><surname>Gronostaj</surname> <given-names>A.</given-names></name> <name><surname>Vock</surname> <given-names>M.</given-names></name></person-group> (<year>2018</year>). <article-title>More enjoyment, less anxiety and boredom: How achievement emotions relate to academic self-concept and teachers&#x2019; diagnostic skills.</article-title> <source><italic>Learn. Individ. Differ.</italic></source> <volume>62</volume> <fpage>108</fpage>&#x2013;<lpage>117</lpage>.</citation></ref>
<ref id="B36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Whitehill</surname> <given-names>J.</given-names></name> <name><surname>Serpell</surname> <given-names>Z.</given-names></name> <name><surname>Lin</surname> <given-names>Y.-C.</given-names></name> <name><surname>Foster</surname> <given-names>A.</given-names></name> <name><surname>Movellan</surname> <given-names>J. R.</given-names></name></person-group> (<year>2014</year>). <article-title>The faces of engagement: Automatic recognition of student engagement from facial expressions.</article-title> <source><italic>IEEE Trans. Affect. Comput.</italic></source> <volume>5</volume> <fpage>86</fpage>&#x2013;<lpage>98</lpage>.</citation></ref>
</ref-list>
</back>
</article>