<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" article-type="research-article" dtd-version="1.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Hum. Neurosci.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Human Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Hum. Neurosci.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1662-5161</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnhum.2025.1739802</article-id>
<article-version article-version-type="Corrected Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Original Research</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Differential arousal and neural engagement for angry and fearful faces: a combined pupillometric and fMRI study</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name><surname>Wende</surname><given-names>Kim C.</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3264427"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Kessler</surname><given-names>Roman</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/834825"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Rusch</surname><given-names>Kristin M.</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Sommer</surname><given-names>Jens</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/131166"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Jansen</surname><given-names>Andreas</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/84373"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>Department of Psychiatry and Psychotherapy, University of Marburg</institution>, <city>Marburg</city>, <country country="DE">Germany</country></aff>
<aff id="aff2"><label>2</label><institution>Institute of Child and Adolescent Psychiatry, University of Kiel</institution>, <city>Kiel</city>, <country country="DE">Germany</country></aff>
<aff id="aff3"><label>3</label><institution>Core-Facility Brainimaging, Faculty of Medicine, University of Marburg</institution>, <city>Marburg</city>, <country country="DE">Germany</country></aff>
<author-notes>
<corresp id="c001"><label>&#x002A;</label>Correspondence: Kim C. Wende, <email xlink:href="mailto:kim.wende@staff.uni-marburg.de">kim.wende@staff.uni-marburg.de</email>; Andreas Jansen, <email xlink:href="mailto:andreas.jansen@staff.uni-marburg.de">andreas.jansen@staff.uni-marburg.de</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-01-12">
<day>12</day>
<month>01</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="corrected" iso-8601-date="2026-02-18">
<day>18</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2025</year>
</pub-date>
<volume>19</volume>
<elocation-id>1739802</elocation-id>
<history>
<date date-type="received">
<day>05</day>
<month>11</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>02</day>
<month>12</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>05</day>
<month>12</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2026 Wende, Kessler, Rusch, Sommer and Jansen.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Wende, Kessler, Rusch, Sommer and Jansen</copyright-holder>
<license>
<ali:license_ref start_date="2026-01-12">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<sec>
<title>Introduction</title>
<p>Understanding how emotions are encoded at the neural level remains a central challenge in human neuroscience. Facial expressions are among the most powerful and frequently used stimuli to study emotion processing. Face perception itself is a complex function supported by a core network&#x2014;including bilateral occipito-fusiform and superior temporal regions&#x2014;and an extended network involving anterior structures such as the bilateral amygdalae. However, previous findings on how emotional content modulates these networks have been inconsistent.</p>
</sec>
<sec>
<title>Methods</title>
<p>To disentangle perceptual and affective components of face emotion processing, we combined high-frequency pupillometry with functional magnetic resonance imaging (fMRI). Pupillary dilation serves as a sensitive index of two distinct processes: perceptual load, reflecting the informational complexity of a face, and arousal, indicating its immediate sensory impact. In our study, 25 participants (13 female) viewed faces expressing anger, fear, happiness, or neutrality as well as luminance-matched houses serving as control stimuli. A one-back task unrelated to emotion masked the true experimental purpose.</p>
</sec>
<sec>
<title>Results</title>
<p>Relative to houses, faces elicited stronger pupillary dilations as well as enhanced blood-oxygen-level-dependent (BOLD) activity in bilateral occipital and fusiform cortices as well as in both amygdalae. Among facial expressions, angry faces evoked the largest pupillary dilations, while fearful faces elicited the strongest neural responses within a right-lateralized network centered on the superior temporal sulcus (rSTS). Across all faces&#x003E;houses (conjunction minimum-statistic inference), pupil size correlated positively with BOLD activity in the right fusiform gyrus (rFFG), left inferior occipital gyrus (lIOG), bilateral calcarine cortex, and bilateral lingual gyrus.</p>
</sec>
<sec>
<title>Discussion</title>
<p>These findings indicate that emotional faces impose a higher perceptual load than matched control stimuli, engaging a distributed network spanning early visual and attention-related areas. In conclusion, our results suggest that emotional quality is specified early in the perceptual process, with divergent pupillary and neural signatures separating arousal-driven threat responses (anger) from socially complex alarm cues (fear).</p>
</sec>
</abstract>
<kwd-group>
<kwd>emotion quality</kwd>
<kwd>face processing</kwd>
<kwd>fMRI</kwd>
<kwd>gestalt</kwd>
<kwd>occipital cortex</kwd>
<kwd>perceptual load</kwd>
<kwd>pupillometry</kwd>
<kwd>superior temporal cortex</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. This work was supported by the Deutsche Forschungsgemeinschaft (German Research Foundation, DFG) under Germany&#x2019;s Excellence Strategy (EXC 3066/1 &#x201C;The Adaptive Mind&#x201D;, Project No. 533717223), by the DFG &#x2013; Project-ID 521379614 (projects B01 and INF) &#x2013; TRR 393 and by the DYNAMIC center, funded by the LOEWE program of the HMWK (grant number: LOEWE1/16/519/03/09.001(0009)/98).</funding-statement>
</funding-group>
<counts>
<fig-count count="4"/>
<table-count count="3"/>
<equation-count count="0"/>
<ref-count count="46"/>
<page-count count="9"/>
<word-count count="6944"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Cognitive Neuroscience</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="sec1">
<label>1</label>
<title>Introduction</title>
<p>The quality of an emotion constitutes a complex set of informational features. In the visual domain, the human face represents the most salient and universally used medium for expressing emotions. Across cultures, observers reliably categorize a limited set of basic emotions from facial expressions (<xref ref-type="bibr" rid="ref10">Ekman, 1992</xref>). Yet it remains an open question whether the human brain performs a similar classification of facial emotions&#x2014;and which neural regions integrate emotional quality into perceptual and experiential representations. FMRI studies have established that face processing engages a core network, encompassing the bilateral occipital and fusiform gyri as well as the superior temporal cortex, and an extended network, including anterior structures such as the bilateral amygdalae (<xref ref-type="bibr" rid="ref19">Haxby et al., 2000</xref>, <xref ref-type="bibr" rid="ref20">2002</xref>; <xref ref-type="bibr" rid="ref12">Fairhall and Ishai, 2007</xref>). Recent meta-analytical work supports the notion of the core and extended face processing networks even for dynamic stimuli (<xref ref-type="bibr" rid="ref39">Zinchenko et al., 2018</xref>).</p>
<p>How exactly emotional expressions are represented within this network remains an open question in both computationally inspired and fundamental neuroscience. It is still debated whether emotional information is embedded within the holistic (Gestalt) representation of a face or constitutes a distinct perceptual cue. Lesion evidence particularly implicates the right fusiform gyrus in processing individual facial identity (<xref ref-type="bibr" rid="ref9005">Rossion, 2008</xref>), whereas emotion processing has been more broadly linked to amygdala function&#x2014;extending beyond facial stimuli (<xref ref-type="bibr" rid="ref6">Davis and Whalen, 2001</xref>).</p>
<p>In fMRI research, the focus has traditionally been on the type of information being processed&#x2014;the &#x201C;what&#x201D;&#x2014;rather than on the dynamics of processing&#x2014;the &#x201C;how&#x201D; (e.g., <xref ref-type="bibr" rid="ref14">Fusar-Poli et al., 2009a</xref>, <xref ref-type="bibr" rid="ref15">2009b</xref>). In contrast, evidence from a recent event-related potential study comparing bodily and facial expressions suggests that the automatic processing of emotional signals from the body influences face recognition, but not as strongly in the opposite direction (<xref ref-type="bibr" rid="ref9004">Puffet and Rigoulot, 2025</xref>). Phenomenologically, the faster processing of body compared to facial emotions may reflect the fact that bodily cues more directly indicate possible actions and are therefore, as sensory data, more immediately relevant to the perceiver (<xref ref-type="bibr" rid="ref7">de Gelder, 2006</xref>). In short, whereas engineered, action-unit&#x2013;based models infer emotional categories from discrete cues such as facial muscle configurations, the human brain implements a more complex, valence-guided, and feedback-sensitive coding scheme that continuously biases activity in early visual areas (<xref ref-type="bibr" rid="ref9002">Murphy et al., 2011</xref>; <xref ref-type="bibr" rid="ref9">Deen et al., 2015</xref>).</p>
<p>Moreover, the processing of movement and continuity&#x2014;the temporal integration of dynamic changes in facial and bodily signals&#x2014;plays a crucial role in social perception. Within the core face network, the superior temporal cortex has been identified as the key site for integrating social and motion-related cues, thereby supporting both social-cognitive interpretation (<xref ref-type="bibr" rid="ref2">Blakemore, 2008</xref>) and visuomotor processing (<xref ref-type="bibr" rid="ref16">Grosbras et al., 2012</xref>).</p>
<p>Conceptually, the term <italic>emotion quality</italic> remains under-specified in face perception research. Most paradigms in the field still follow Ekman&#x2019;s framework of basic emotions (<xref ref-type="bibr" rid="ref10">Ekman, 1992</xref>; see <xref ref-type="bibr" rid="ref36">Vytal and Hamann, 2010</xref>). This model distinguishes classes of negative (e.g., fear, anger) and positive (e.g., happiness) valence; yet, what constitutes <italic>successful processing</italic> of such emotions remains unclear. For instance, autism research frequently operationalizes social cognition through Ekman-based facial emotion recognition tasks (<xref ref-type="bibr" rid="ref30">Nagy et al., 2021</xref>). Paradoxically, the sheer abundance of studies using Ekman faces has produced rather inconsistent behavioral and neural results. A central conceptual debate concerns whether a baseline emotion truly exists&#x2014;that is, whether any facial expression can be regarded as genuinely <italic>neutral</italic> and emotionally unloaded (<xref ref-type="bibr" rid="ref34">Uljarevic and Hamilton, 2013</xref>). At the neurophysiological level, emotional valence interacts with visual salience (or arousal) (<xref ref-type="bibr" rid="ref4">Corbetta and Shulman, 2002</xref>). In subjective experience, both implicit salience and explicit valence jointly shape the perceived Gestalt of a face.</p>
<p>Pupil dilation offers a valuable window into the neural mechanisms underlying emotion perception. Two complementary concepts are particularly informative:</p><list list-type="roman-lower">
<list-item>
<p>Perceptual load, which reflects the cognitive demands of stimulus processing and has been linked to emotional valence (<xref ref-type="bibr" rid="ref22">Kahneman and Beatty, 1966</xref>; <xref ref-type="bibr" rid="ref23">Kahneman and Wright, 1971</xref>; <xref ref-type="bibr" rid="ref9001">Kahneman et al., 1969</xref>).</p>
</list-item>
<list-item>
<p>Arousal-based neural responses, which precede non-arousal-related processes and correspond to sensory salience (<xref ref-type="bibr" rid="ref21">Honma et al., 2012</xref>; <xref ref-type="bibr" rid="ref9002">Murphy et al., 2011</xref>; <xref ref-type="bibr" rid="ref33">Tamietto et al., 2009</xref>).</p>
</list-item>
</list>
<p>In neuroimaging studies of emotional face perception, high-frequency pupillometry can help disentangle these processes by distinguishing rapid, arousal-driven responses from slower, cognitively mediated perceptual load effects. Traditional pupillometry shows that the time course of dilation encodes stimulus salience (<xref ref-type="bibr" rid="ref27">Kret et al., 2013</xref>), whereas the slower component of the pupil response&#x2014;emerging later and reflecting the processing demands of complex visual stimuli&#x2014;can be captured with high temporal precision using high-frequency eye-tracking (<xref ref-type="bibr" rid="ref38">Wierda et al., 2012</xref>). The simultaneous acquisition of high-frequency pupillometry and fMRI is methodologically critical for a holistic neurophysiological account of face perception, as it bridges a fundamental resolution gap. fMRI&#x2019;s low temporal resolution, on the order of seconds, is ill-suited to capture the rapid, sub-second dynamics of the subcortical visual pathways&#x2014;including the superior colliculus and the pulvinar&#x2014;that are intimately engaged in the initial, arousal-related components of processing socially salient faces. Pupillary oscillations, controlled by these same autonomic brainstem circuits, provide a continuous, millisecond-scale readout of this rapid arousal response. By correlating this high-fidelity temporal trace of arousal with the spatially precise hemodynamic response signal from fMRI, researchers can disambiguate the distinct, yet temporally intertwined, contributions of the fast, subcortical arousal network from the slower, higher-order cortical regions involved in detailed face analysis, thereby providing a more complete and directionally linked model from initial orienting to full cognitive appraisal.</p>
<p>We therefore combined fMRI (MRI data entered a connectivity analysis whose results are already published; see, <xref ref-type="bibr" rid="ref24">Kessler et al., 2021</xref>) with high-frequency pupillometry to identify distinct neural mechanisms underlying face perception and the processing of emotional quality in response to <xref ref-type="bibr" rid="ref10">Ekman (1992)</xref> expressions. We focused on anger and fear, as previous work suggests that these emotions differ in their underlying quality and functional significance (<xref ref-type="bibr" rid="ref5">Davis et al., 2011</xref>). We hypothesized that angry faces, as direct threat signals, would evoke greater pupil dilation, reflecting heightened arousal, whereas fearful faces, which indicate environmental alarm, would preferentially engage the right superior temporal sulcus (rSTS)&#x2014;a region implicated in the integration of social and motion cues. To ensure balanced emotional valence, happy and neutral expressions were included as comparison conditions (<xref ref-type="bibr" rid="ref34">Uljarevic and Hamilton, 2013</xref>). Luminance-matched house images served as non-social control stimuli.</p>
</sec>
<sec sec-type="methods" id="sec2">
<label>2</label>
<title>Methods</title>
<sec id="sec3">
<label>2.1</label>
<title>Subjects</title>
<p>Twenty-five healthy volunteers (13 female; age range 21&#x2013;29&#x202F;years, mean&#x202F;=&#x202F;24.3, SD&#x202F;=&#x202F;2.1), recruited from students and staff at the University of Marburg, participated in the study. All participants were right-handed (<xref ref-type="bibr" rid="ref9003">Oldfield, 1971</xref>), had normal or corrected-to-normal vision, and reported no history of neurological or psychiatric disorders. Written informed consent was obtained from all participants. Experimental procedures were conducted in accordance with the Declaration of Helsinki and approved by the local Ethics Committee (proposal #30/16).</p>
</sec>
<sec id="sec4">
<label>2.2</label>
<title>Experimental design</title>
<p>Five stimulus conditions were presented: faces displaying neutral (NF), happy (HF), angry (AF), or fearful (FF) expressions from the Radboud Faces Database (<xref ref-type="bibr" rid="ref28">Langner et al., 2010</xref>), and houses (H) as a control condition. All images were converted to grayscale and cropped to 500&#x202F;&#x00D7;&#x202F;400 px using ImageMagick (version 6.8.9&#x2013;9, Q16 x86_64; <sup>&#x00A9;</sup>1999&#x2013;2014 ImageMagick Studio LLC). Mean luminance was equated across stimuli using the SHINE toolbox for MATLAB (<xref ref-type="bibr" rid="ref9007">Willenbockel et al., 2010</xref>). Spatial-frequency matching was deliberately omitted in order to preserve the natural frequency content that is critical for rapid face and emotion processing via subcortical pathways (<xref ref-type="bibr" rid="ref9006">Vuilleumier et al., 2003</xref>). Because pupil responses are highly sensitive to even subtle changes in low- and mid-frequency structure, any artificial SF equalization would have compromised the ecological validity of the stimuli as well as the perceptual mechanisms underlying the pupillary signal. In addition, all stimuli were already luminance-matched and have been validated in previous work; further SF manipulation would likely have introduced distortions that run counter to the aim of presenting perceptually natural emotional stimuli. Example stimuli are shown in <xref ref-type="fig" rid="fig1">Figure 1</xref>. The experimental procedure is shown in <xref ref-type="fig" rid="fig2">Figure 2</xref>.</p>
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption>
<p>Example stimuli. Faces displaying neutral (top left), happy (top middle), angry (top right), and fearful (bottom left) expressions, alongside luminance-matched houses as a control condition (bottom middle). Faces reproduced with permission from <xref ref-type="bibr" rid="ref28">Langner et al. (2010)</xref>.</p>
</caption>
<graphic xlink:href="fnhum-19-1739802-g001.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Five black and white images arranged in a grid. Four depict a woman with a crosshair on her nose showing different expressions: neutral, smiling, angry, and fearful. The fifth image shows a large, two-story brick house with trees and a lawn.</alt-text>
</graphic>
</fig>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption>
<p>Procedure of the experiment. Each stimulus is presented for 350&#x202F;ms, with an inter-stimulus interval of 150&#x202F;ms. Twenty-four images form a block with a duration of 12&#x202F;s. Between blocks, there is a break lasting between 4 and 7&#x202F;s. Twenty blocks from each category (neutral, happy, angry, fearful, houses) are presented in a pseudo-randomized order. If the same stimulus is repeated, the participant should indicate the repetition by pressing a key. Faces reproduced with permission from <xref ref-type="bibr" rid="ref28">Langner et al. (2010)</xref>.</p>
</caption>
<graphic xlink:href="fnhum-19-1739802-g002.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">A sequential series of grayscale images shows a person&#x2019;s different facial expressions, from fearful to neutral, then angry, followed by a house, and finally smiling. Time intervals between these images are labeled as twelve seconds, three to seven seconds, and various short times. Below, overlapping images of people with neutral or emotional expressions change quickly, marked by intervals of 0.35 seconds and 0.15 seconds. An icon of a hand clicking is shown at the end.</alt-text>
</graphic>
</fig>
<p>Stimuli were presented in alternating block conditions (NF, HF, AF, FF, H) on a rear-projected 16:9 monitor, viewed via a mirror positioned approximately 15&#x202F;cm above the participant&#x2019;s eyes in the MRI scanner (Presentation v14.1, Neurobehavioral Systems). Participants were naive to the experimental purpose and performed a cover one-back task (button presses with both index fingers) to maintain attention.</p>
<p>Each block contained 24 stimuli, each displayed for 350&#x202F;ms with 150&#x202F;ms inter-stimulus intervals. The block order was identical for all participants. In total, 100 blocks (~12&#x202F;s each) were presented, yielding an experimental duration of ~30&#x202F;min. Each block was preceded by a fixation cross presented for a jittered interval of 4,000&#x2013;7,000&#x202F;ms.</p>
</sec>
<sec id="sec5">
<label>2.3</label>
<title>Data acquisition</title>
<sec id="sec6">
<label>2.3.1</label>
<title>Pupillometry data</title>
<p>Left-eye pupil diameter was recorded continuously at 1&#x202F;kHz during each ~12&#x202F;s block using an MRI-compatible EyeLink 1000 infrared camera (SR Research). A 5-point calibration was performed prior to recording. Blinks were identified using the standard EyeLink detection routines.</p>
</sec>
<sec id="sec7">
<label>2.3.2</label>
<title>MRI data</title>
<p>MRI data was acquired on a 3&#x202F;T Siemens scanner (TIM Trio, Siemens, Erlangen, Germany). High-resolution T1-weighted anatomical images were acquired for each participant using a magnetization-prepared rapid gradient-echo (3D MP-RAGE) sequence in sagittal orientation (TR&#x202F;=&#x202F;1900&#x202F;ms, TE&#x202F;=&#x202F;2.54&#x202F;ms, voxel size&#x202F;=&#x202F;1&#x202F;&#x00D7;&#x202F;1&#x202F;&#x00D7;&#x202F;1&#x202F;mm<sup>3</sup>, 176 slices, 1&#x202F;mm thickness, flip angle 9&#x00B0;, matrix size&#x202F;=&#x202F;384&#x202F;&#x00D7;&#x202F;384, FoV&#x202F;=&#x202F;384&#x202F;&#x00D7;&#x202F;384&#x202F;mm). Functional data were collected using a <italic>T2&#x002A;-weighted EPI sequence</italic> sensitive to the BOLD contrast (TR&#x202F;=&#x202F;1,550&#x202F;ms; TE&#x202F;=&#x202F;36&#x202F;ms; flip angle&#x202F;=&#x202F;70&#x00B0;) with 20 transverse slices (slice thickness&#x202F;=&#x202F;2.7&#x202F;mm; interslice gap&#x202F;=&#x202F;0.4&#x202F;mm; FoV&#x202F;=&#x202F;200&#x202F;mm; voxel size&#x202F;=&#x202F;2.8&#x202F;&#x00D7;&#x202F;2.8&#x202F;&#x00D7;&#x202F;3.1&#x202F;mm, including gap). This sequence was chosen based on pilot data to provide robust single-subject amygdala activation.</p>
</sec>
</sec>
<sec id="sec8">
<label>2.4</label>
<title>Data analysis</title>
<sec id="sec9">
<label>2.4.1</label>
<title>Pupillometry data</title>
<p>Blinks and saccades were detected using EyeLink routines with standard thresholds (saccade acceleration &#x2265; 500&#x00B0;/s<sup>2</sup>; velocity &#x2265; 50&#x00B0;/s). Microsaccades were treated as saccades. Segments containing blinks within the first 1,500&#x202F;ms of a block were excluded due to presumed reduced attention at block onset. For the included trials/blocks, blink periods were linearly interpolated (<xref ref-type="bibr" rid="ref13">Fr&#x00E4;ssle et al., 2016</xref>). To reduce sequence effects, pupil traces were normalized per block to the average pupil size during the first 200&#x202F;ms following the first stimulus onset (<xref ref-type="bibr" rid="ref38">Wierda et al., 2012</xref>). Preprocessing and temporal analyses were performed in MATLAB (R2014a).</p>
<p>Pupil traces from 0&#x2013;5&#x202F;s relative to the first stimulus onset were extracted for each condition. This window captures both fast (initial) and slow (later) responses, while remaining within the first half of the block, which is assumed to be less affected by blink-related artifacts. The remaining ~7,000&#x202F;ms of each block were excluded due to increased noise from blinks.</p>
<p>Mean pupil dilation within the 0&#x2013;5&#x202F;s window was compared between conditions using Wilcoxon&#x2013;Mann&#x2013;Whitney tests with sequential Bonferroni correction (<italic>&#x03B1;</italic> =&#x202F;0.05). In addition, an ANOVA (SPSS 21, IBM) was conducted to assess the effect of condition on mean pupil dilation.</p>
<p>Parametric modulation of fMRI by pupil size: Parametric regressors were derived from the initial 5&#x202F;s of each block. Pupil data were normalized to baseline (0&#x2013;200&#x202F;ms) as percentage change, downsampled to match the MR micro-time resolution, and demeaned for SPM compatibility. Regressors were then convolved with the canonical hemodynamic response function and resampled at the TR (1.55&#x202F;s). These parametric regressors were included per condition as effects of interest in a second first-level fMRI model.</p>
</sec>
<sec id="sec10">
<label>2.4.2</label>
<title>MRI data</title>
<p>MRI data were preprocessed using SPM12 (r6685; Wellcome Centre for Human Neuroimaging; MATLAB). The first three functional volumes were discarded to allow for T1 signal stabilization. Field maps were computed from phase and magnitude images, converted to voxel displacement maps, and used to unwarp EPI images. A combined realign-and-unwarp procedure corrected for static and motion-related susceptibility distortions, while within-subject motion was further corrected using 6-parameter rigid-body transformations. Functional images were then normalized to Montreal Neurological Institute (MNI) space and smoothed with a 6&#x202F;mm full-width-at-half-maximum (FWHM) Gaussian kernel.</p>
<p>A general linear model (GLM) block design was specified for the five conditions (NF, HF, AF, FF, H) using the canonical HRF without temporal or dispersion derivatives. Onset vectors were generated for each participant from Presentation logs. The six motion parameters were included as nuisance regressors. A high-pass filter at 1/256&#x202F;Hz was applied (extended from the standard 1/128&#x202F;Hz). For each participant, condition-specific effects produced five t-contrasts corresponding to NF, HF, AF, FF, and H.</p>
<p>For the pupil-covariation GLM, conditions were modeled as regressors of no interest, while t-contrasts targeted the parametric modulators derived from the initial-phase pupil data. To isolate face-selective correlations, each face condition was contrasted against houses at the first level, resulting in four t-contrasts per participant (pupilmod_NF: NF&#x202F;&#x003E;&#x202F;H, etc.).</p>
<p>At the group level, a flexible factorial model was used to combine single-subject contrasts. Unless otherwise noted, second-level contrasts were evaluated using t-statistics with voxel-wise family-wise error (FWE) correction (<italic>p</italic> &#x003C;&#x202F;0.05) and a cluster-extent threshold of k&#x202F;&#x2265;&#x202F;10 voxels, to reduce false positives in small ROIs such as the amygdalae. For the anatomical labelling of the resulting cluster peak voxel locations, the SPM-implemented Anatomy Toolbox atlas was used.</p>
<sec id="sec11">
<label>2.4.2.1</label>
<title>Contrasts of interest</title>
<p>Commonalities and differences in emotional face processing: a group conjunction of all face &#x003E; house contrasts (NF&#x202F;&#x003E;&#x202F;H &#x2229; HF&#x202F;&#x003E;&#x202F;H &#x2229; AF&#x202F;&#x003E;&#x202F;H &#x2229; FF&#x202F;&#x003E;&#x202F;H) was used to assess shared face-related BOLD responses. Differences among the two negative emotions were examined using pairwise contrasts (i.e., FF&#x202F;&#x003E;&#x202F;AF, AF&#x202F;&#x003E;&#x202F;FF). To assess the common negative valence of fear and anger, we computed for each the conjunction contrasts to the two non-negative conditions (FF&#x202F;&#x003E;&#x202F;HF) &#x2229; (FF&#x202F;&#x003E;&#x202F;NF), (AF&#x202F;&#x003E;&#x202F;HF) &#x2229; (AF&#x202F;&#x003E;&#x202F;NF).</p>
<p>Parametric modulation by pupil dilation: a main-effect t-contrast tested the average modulation of face-related BOLD activity by pupil dilation over 0&#x2013;5&#x202F;s (pupilmod_NF, pupilmod_HF, pupilmod_AF, pupilmod_FF), with family-wise error correction applied.</p>
</sec>
</sec>
</sec>
</sec>
<sec sec-type="results" id="sec12">
<label>3</label>
<title>Results</title>
<sec id="sec13">
<label>3.1</label>
<title>fMRI data</title>
<sec id="sec14">
<label>3.1.1</label>
<title>Activation for faces across all emotions</title>
<p>The conjunction of all face &#x003E; house contrasts (NF&#x202F;&#x003E;&#x202F;H &#x2229; HF&#x202F;&#x003E;&#x202F;H &#x2229; AF&#x202F;&#x003E;&#x202F;H &#x2229; FF&#x202F;&#x003E;&#x202F;H) revealed increased BOLD activity in the bilateral inferior occipital (IOG) and fusiform (FFG) gyri, corresponding to the core face perception network, as well as in the bilateral amygdalae (AMY) (<xref ref-type="fig" rid="fig3">Figure 3</xref>, turquoise; <xref ref-type="table" rid="tab1">Table 1</xref>).</p>
<fig position="float" id="fig3">
<label>Figure 3</label>
<caption>
<p>Turquoise: face-related brain activation, irrespective of emotional content [i.e., conjunction contrast (neutral faces &#x003E; houses) &#x2229; (happy faces &#x003E; houses) &#x2229; (angry faces &#x003E; houses) &#x2229; (fearful faces &#x003E; houses)], was observed in the bilateral IOG and FFG &#x2014;the core face perception network&#x2014;as well as in the bilateral AMY. Green: associations between pupil dilation and BOLD activity were found in multiple regions of the occipito-temporal cortex, including the core system of face perception as well as more posterior located regions in the early visual cortex. Statistical threshold: <italic>p</italic>&#x202F;&#x003C;&#x202F;0.05, FWE-corrected, with a cluster-extent threshold of 10 voxels. IOG&#x202F;=&#x202F;inferior occipital gyrus, FFG&#x202F;=&#x202F;fusiform gyrus, AMY&#x202F;=&#x202F;amygdala.</p>
</caption>
<graphic xlink:href="fnhum-19-1739802-g003.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Brain MRI scans show activation areas marked in blue and green overlayed on four different z-plane views, labeled as Faces greater than Houses (FWE-corrected) for main effect and pupil covariation. z-coordinate (MNI) are -16, -12, -2, and 8.</alt-text>
</graphic>
</fig>
<table-wrap position="float" id="tab1">
<label>Table 1</label>
<caption>
<p>fMRI results for the conjunction contrast (neutral faces &#x003E; houses) &#x2229; (happy faces &#x003E; houses) &#x2229; (angry faces &#x003E; houses) &#x2229; (fearful faces &#x003E; houses).</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Cluster</th>
<th align="center" valign="top" colspan="3">MNI-coordinates (x, y, z)</th>
<th align="center" valign="top">Cluster size</th>
<th align="center" valign="top">Peak T-value</th>
<th align="center" valign="top">Cluster <italic>p</italic>-value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">R FFG</td>
<td align="center" valign="top">42</td>
<td align="center" valign="top">&#x2212;52</td>
<td align="center" valign="top">20</td>
<td align="center" valign="top">111</td>
<td align="center" valign="top">10.79</td>
<td align="center" valign="top">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">R IOG</td>
<td align="center" valign="top">48<break/>54</td>
<td align="center" valign="top">&#x2212;72<break/>&#x2212;52</td>
<td align="center" valign="top">&#x2212;6<break/>4</td>
<td align="center" valign="top">360</td>
<td align="center" valign="top">10.20<break/>5.80</td>
<td align="center" valign="top">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">L FFG</td>
<td align="center" valign="top">&#x2212;42</td>
<td align="center" valign="top">&#x2212;54</td>
<td align="center" valign="top">&#x2212;18</td>
<td align="center" valign="top">80</td>
<td align="center" valign="top">8.85</td>
<td align="center" valign="top">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">L IOG</td>
<td align="center" valign="top">&#x2212;40</td>
<td align="center" valign="top">&#x2212;84</td>
<td align="center" valign="top">&#x2212;12</td>
<td align="center" valign="top">28</td>
<td align="center" valign="top">6.88</td>
<td align="center" valign="top">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">R AMY</td>
<td align="center" valign="top">22</td>
<td align="center" valign="top">&#x2212;6</td>
<td align="center" valign="top">&#x2212;12</td>
<td align="center" valign="top">23</td>
<td align="center" valign="top">6.55</td>
<td align="center" valign="top">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">L AMY</td>
<td align="center" valign="top">&#x2212;18</td>
<td align="center" valign="top">&#x2212;8</td>
<td align="center" valign="top">&#x2212;14</td>
<td align="center" valign="top">12</td>
<td align="center" valign="top">5.48</td>
<td align="center" valign="top">0.002</td>
</tr>
<tr>
<td/>
<td align="center" valign="top">16</td>
<td align="center" valign="top">&#x2212;36</td>
<td align="center" valign="top">20</td>
<td align="center" valign="top">31</td>
<td align="center" valign="top">5.53</td>
<td align="center" valign="top">&#x003C;0.001</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p>Statistical threshold: <italic>p</italic> &#x003C;&#x202F;0.05, FWE-corrected, with a cluster-extent threshold of 10 voxels. FFG&#x202F;=&#x202F;fusiform gyrus, IOG&#x202F;=&#x202F;inferior occipital gyrus, AMY&#x202F;=&#x202F;amygdala, R&#x202F;=&#x202F;right, L&#x202F;=&#x202F;left.</p>
</table-wrap-foot>
</table-wrap>
</sec>
<sec id="sec15">
<label>3.1.2</label>
<title>Differential activations of negative emotions</title>
<p>Significant differences were observed only for fearful faces (<xref ref-type="table" rid="tab2">Table 2</xref>). The conjunction FF &#x003E; HF &#x2229; FF &#x003E; NF revealed clusters in the right superior temporal sulcus and gyrus (STS/STG), right IOG and left IOG. The contrast FF &#x003E; AF showed increased responses in the right IOG, right STS, and right amygdala (AMY).</p>
<table-wrap position="float" id="tab2">
<label>Table 2</label>
<caption>
<p>fMRI results for the differences between negative emotions.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Cluster</th>
<th align="center" valign="top" colspan="3">MNI-coordinates (x, y, z)</th>
<th align="center" valign="top">Cluster size</th>
<th align="center" valign="top">Peak T-value</th>
<th align="center" valign="top">Cluster <italic>p</italic>-value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top" colspan="7">Contrast: (fearful faces &#x003E; happy faces) &#x2229; (fearful faces &#x003E; neutral faces)</td>
</tr>
<tr>
<td align="left" valign="top">R STS</td>
<td align="center" valign="top">48</td>
<td align="center" valign="top">&#x2212;42</td>
<td align="center" valign="top">6</td>
<td align="center" valign="top">168</td>
<td align="char" valign="top" char=".">7.20</td>
<td align="char" valign="top" char=".">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">R IOG</td>
<td align="center" valign="top">36</td>
<td align="center" valign="top">&#x2212;84</td>
<td align="center" valign="top">&#x2212;8</td>
<td align="center" valign="top">31</td>
<td align="char" valign="top" char=".">5.65</td>
<td align="char" valign="top" char=".">0.015</td>
</tr>
<tr>
<td align="left" valign="top">R STG</td>
<td align="center" valign="top">52</td>
<td align="center" valign="top">&#x2212;60</td>
<td align="center" valign="top">&#x2212;2</td>
<td align="center" valign="top">17</td>
<td align="char" valign="top" char=".">5.38</td>
<td align="char" valign="top" char=".">0.035</td>
</tr>
<tr>
<td align="left" valign="top">L IOG</td>
<td align="center" valign="top">&#x2212;20</td>
<td align="center" valign="top">&#x2212;86</td>
<td align="center" valign="top">&#x2212;14</td>
<td align="center" valign="top">15</td>
<td align="char" valign="top" char=".">5.24</td>
<td align="char" valign="top" char=".">0.045</td>
</tr>
<tr>
<td align="left" valign="top" colspan="7">Contrast: fearful faces &#x003E; angry faces</td>
</tr>
<tr>
<td align="left" valign="top">R IOG</td>
<td align="center" valign="top">32</td>
<td align="center" valign="top">&#x2212;88</td>
<td align="center" valign="top">&#x2212;8</td>
<td align="center" valign="top">81</td>
<td align="char" valign="top" char=".">6.06</td>
<td align="char" valign="top" char=".">0.001</td>
</tr>
<tr>
<td align="left" valign="top">R STS</td>
<td align="center" valign="top">42</td>
<td align="center" valign="top">&#x2212;36</td>
<td align="center" valign="top">10</td>
<td align="center" valign="top">32</td>
<td align="char" valign="top" char=".">5.90</td>
<td align="char" valign="top" char=".">0.012</td>
</tr>
<tr>
<td align="left" valign="top">R AMY</td>
<td align="center" valign="top">26</td>
<td align="center" valign="top">0</td>
<td align="center" valign="top">&#x2212;14</td>
<td align="center" valign="top">20</td>
<td align="char" valign="top" char=".">5.53</td>
<td align="char" valign="top" char=".">0.025</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p>Significant increases of BOLD-response were observed only for fearful faces. Statistical threshold: <italic>p</italic> &#x003C;&#x202F;0.05, FWE-corrected, with a cluster-extent threshold of 10 voxels. STS&#x202F;=&#x202F;superior temporal sulcus, IOG&#x202F;=&#x202F;inferior occipital gyrus, STG&#x202F;=&#x202F;superior temporal gyrus, AMY&#x202F;=&#x202F;amygdala, R&#x202F;=&#x202F;right, L&#x202F;=&#x202F;left.</p>
</table-wrap-foot>
</table-wrap>
</sec>
</sec>
<sec id="sec16">
<label>3.2</label>
<title>Pupillometry data</title>
<p>Initial pupil constriction peaked at approximately 600&#x202F;ms after the first stimulus onset, followed by redilation, which reached its maximum from around 2,500&#x202F;ms onward. After the initial constriction, all face conditions exhibited larger pupil sizes over time compared with houses (<xref ref-type="fig" rid="fig4">Figure 4</xref>, light stars). A repeated-measures ANOVA revealed a significant main effect of condition on pupil dilation, <italic>F</italic> =&#x202F;14.8, <italic>p</italic> &#x003C;&#x202F;0.001, partial &#x03B7;<sup>2</sup> =&#x202F;0.059. A sensitivity power analysis conducted in G&#x002A;Power 3.1 for a within-subjects ANOVA with five measurements, using a sample size of 25, <italic>&#x03B1;</italic> =&#x202F;0.05, and 80% power, determined that this design could detect effects of size <italic>f</italic> &#x2265;&#x202F;0.22, which is below our observed effect size. Notably, a rank-sum test revealed that angry faces elicited a significantly greater increase over time than all other face conditions (<xref ref-type="fig" rid="fig4">Figure 4</xref>, dark stars). Bonferroni post-hoc tests revealed that pupil dilation was significantly greater in AF compared to NF (mean difference&#x202F;=&#x202F;0.0081, <italic>p</italic> &#x003C;&#x202F;0.001), HF (mean difference&#x202F;=&#x202F;0.0055, <italic>p</italic> =&#x202F;0.011), FF (mean difference&#x202F;=&#x202F;0.0065, <italic>p</italic> =&#x202F;0.001) and H (mean difference&#x202F;=&#x202F;0.0168, <italic>p</italic> &#x003C;&#x202F;0.001), while H elicited significantly smaller dilation than all other conditions (mean differences H-NF&#x202F;=&#x202F;&#x2212;0.0087, H-HF&#x202F;=&#x202F;&#x2212;0.0113, H-FF&#x202F;=&#x202F;&#x2212;0.0103, all <italic>p</italic> &#x003C;&#x202F;0.001).</p>
<fig position="float" id="fig4">
<label>Figure 4</label>
<caption>
<p>Averaged pupil dilations over time (normalized to the first 200&#x202F;ms of each block; 0&#x2013;5,000&#x202F;ms after first stimulus onset). Following stimulus onset, the pupil initially constricted, peaking at approximately 600&#x202F;ms, and subsequently redilated. Redilation was significantly larger for faces than for houses (light stars). Among face conditions, angry faces elicited a significantly greater increase over time compared with the other expressions (dark stars).</p>
</caption>
<graphic xlink:href="fnhum-19-1739802-g004.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Line graph showing normalized mean pupil size over time in milliseconds for different emotional expressions: neutral, happy, angry, fearful, and a control condition of houses. Pupil size initially drops then differentiates, with lines for each expression showing distinct trends and error bars.</alt-text>
</graphic>
</fig>
</sec>
<sec id="sec17">
<label>3.3</label>
<title>Combination of pupillometry and fMRI data</title>
<p>Parametric analyses revealed a significant main effect of parametric modulation by pupil dilation on face-emotion-specific BOLD activity in the left inferior and middle occipital gyri (IOG, MOG), the right fusiform (FFG), as well as in more posterior regions such as the left occipital pole (OCP), the right calcarine gyrus (CAL), and the bilateral lingual gyrus (LG) (<xref ref-type="fig" rid="fig3">Figure 3</xref>, green; <xref ref-type="table" rid="tab3">Table 3</xref>).</p>
<table-wrap position="float" id="tab3">
<label>Table 3</label>
<caption>
<p>Main effect of pupil dilation on BOLD responses for neutral, happy, angry, and fearful faces.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Cluster</th>
<th align="center" valign="top" colspan="3">MNI-coordinates (x, y, z)</th>
<th align="center" valign="top">Cluster size</th>
<th align="center" valign="top">Peak T-value</th>
<th align="center" valign="top">Cluster <italic>p</italic>-value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">R FFG</td>
<td align="center" valign="top">34</td>
<td align="center" valign="top">&#x2212;50</td>
<td align="center" valign="top">&#x2212;16</td>
<td align="center" valign="top">27</td>
<td align="center" valign="top">6.58</td>
<td align="char" valign="top" char=".">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">L IOG</td>
<td align="center" valign="top">&#x2212;22</td>
<td align="center" valign="top">&#x2212;80</td>
<td align="center" valign="top">&#x2212;12</td>
<td align="center" valign="top">45</td>
<td align="center" valign="top">6.44</td>
<td align="char" valign="top" char=".">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">R CAL</td>
<td align="center" valign="top">14</td>
<td align="center" valign="top">&#x2212;96</td>
<td align="center" valign="top">&#x2212;2</td>
<td align="center" valign="top">19</td>
<td align="center" valign="top">6.26</td>
<td align="char" valign="top" char=".">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">L MOG</td>
<td align="center" valign="top">&#x2212;30<break/>&#x2212;25</td>
<td align="center" valign="top">&#x2212;92<break/>&#x2212;85</td>
<td align="center" valign="top">8<break/>12</td>
<td align="center" valign="top">41</td>
<td align="center" valign="top">5.95<break/>5.88</td>
<td align="char" valign="top" char=".">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">L LG</td>
<td align="center" valign="top">&#x2212;10</td>
<td align="center" valign="top">&#x2212;88</td>
<td align="center" valign="top">12</td>
<td align="center" valign="top">18</td>
<td align="center" valign="top">5.72</td>
<td align="char" valign="top" char=".">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">R LG</td>
<td align="center" valign="top">26</td>
<td align="center" valign="top">&#x2212;78</td>
<td align="center" valign="top">&#x2212;6</td>
<td align="center" valign="top">16</td>
<td align="center" valign="top">5.69</td>
<td align="char" valign="top" char=".">&#x003C;0.001</td>
</tr>
<tr>
<td align="left" valign="top">L OCP</td>
<td align="center" valign="top">&#x2212;14</td>
<td align="center" valign="top">&#x2212;98</td>
<td align="center" valign="top">&#x2212;2</td>
<td align="center" valign="top">19</td>
<td align="center" valign="top">5.49</td>
<td align="char" valign="top" char=".">&#x003C;0.001</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p>Statistical threshold: <italic>p</italic> &#x003C;&#x202F;0.05, FWE-corrected, with a cluster-extent threshold of 10 voxels. FFG&#x202F;=&#x202F;fusiform gyrus; IOG&#x202F;=&#x202F;inferior occipital gyrus, MOG&#x202F;=&#x202F;middle occipital gyrus; CAL&#x202F;=&#x202F;calcarine cortex; OCP&#x202F;=&#x202F;occipital pole; LG&#x202F;=&#x202F;lingual gyrus; R&#x202F;=&#x202F;right; L&#x202F;=&#x202F;left.</p>
</table-wrap-foot>
</table-wrap>
</sec>
</sec>
<sec sec-type="discussion" id="sec18">
<label>4</label>
<title>Discussion</title>
<p>The perception of emotion from faces integrates both sensory input and cognitive appraisal. In this study, we examined how the perceptual load of emotional faces, indexed by pupillometry, relates to their neural processing, with a particular focus on dissociating pathways for the negative emotions of anger and fear.</p>
<sec id="sec19">
<label>4.1</label>
<title>The perceptual load of faces</title>
<p>Consistent with the established core face-perception network (<xref ref-type="bibr" rid="ref19">Haxby et al., 2000</xref>, <xref ref-type="bibr" rid="ref20">2002</xref>), faces evoked greater BOLD responses in bilateral occipital and fusiform cortices and larger pupillary dilations than luminance-matched houses. This is in line with evidence for face-selective attentional modulation under high perceptual load (<xref ref-type="bibr" rid="ref31">Neumann et al., 2011</xref>).</p>
<p>Critically, the parametric modulation of the BOLD signal by pupil dilation provides direct evidence for this link, showing that face-evoked pupil time courses correlated with activity in a distributed occipital network including the right fusiform gyrus (rFFG), left inferior (lIOG) and middle occipital gyrus (lMOG), the left occipital pole (lOCP), and crucially, early visual areas such as the bilateral calcarine (CAL) and lingual gyri (LG).</p>
<p>The pupil-linked modulation in the calcarine cortex is particularly informative. As a site of primary visual processing (<xref ref-type="bibr" rid="ref26">Klein et al., 2000</xref>) that is modulated by attention and behavioral relevance (<xref ref-type="bibr" rid="ref18">Han et al., 2005</xref>), its correlation with pupil size&#x2014;a known index of arousal and processing demand (<xref ref-type="bibr" rid="ref22">Kahneman and Beatty, 1966</xref>; <xref ref-type="bibr" rid="ref3">Bradley et al., 2008</xref>)&#x2014;strongly suggests that faces impose a higher perceptual load than inanimate objects.</p>
<p>This load-related activity extends into the lingual gyrus, a region associated with internally directed attention and known for early face-selective responses (<xref ref-type="bibr" rid="ref1">Benedek et al., 2016</xref>; <xref ref-type="bibr" rid="ref32">Par&#x00E9; et al., 2023</xref>), supporting its role in forming the abstract representation of the face category (<xref ref-type="bibr" rid="ref37">Watson et al., 2016</xref>). These findings indicate that the core face network, particularly the right fusiform gyrus, is not only engaged for face processing per se, but that its activity level is tuned to occipital-lingual representations of overall perceptual load, as reflected in pupil diameter.</p>
</sec>
<sec id="sec20">
<label>4.2</label>
<title>Anger dilates: a threat-triggered arousal response</title>
<p>Our key finding reveals a clear dissociation between anger and fear: while anger specifically enhanced pupil dilation, fear preferentially engaged distinct neural regions. This suggests that anger processing is characterized by a broad, arousal-dominated response.</p>
<p>Anger likely drives this heightened perceptual effort due to its direct threatening nature, whereas fear signals an indirect, environmental threat. This aligns with findings that angry faces are better remembered, suggesting they draw attention to the threatening agent itself, whereas fear directs attention outward to the environment (<xref ref-type="bibr" rid="ref5">Davis et al., 2011</xref>). Our pupillometric data indicate that this anger-specific response is rapid, with a stronger pupil response emerging between 1,800 and 2,900&#x202F;ms&#x2014;a timeframe compatible with late affective appraisal in event-related potential (ERP) studies (<xref ref-type="bibr" rid="ref25">Klein et al., 2015</xref>). This rapid arousal response likely biases early visual processing (<xref ref-type="bibr" rid="ref35">Vinck et al., 2015</xref>), priming the system for immediate action.</p>
<p>From a Gestalt perspective, visual systems prioritize cues with immediate behavioral relevance. The direct threat of potential violence conveyed by anger is thus prioritized, triggering a global arousal state reflected in the pupil. This dovetails with work showing angry faces modulate frontal empathy networks (<xref ref-type="bibr" rid="ref11">Enzi et al., 2016</xref>). The fact that the amygdala was more engaged by fear than anger further underscores this dissociation; the amygdala&#x2019;s role in vigilance for ambiguous threats (<xref ref-type="bibr" rid="ref6">Davis and Whalen, 2001</xref>) makes it more critical for processing the alarm signal of fear than the clear, direct threat of anger.</p>
</sec>
<sec id="sec21">
<label>4.3</label>
<title>Fear engages: a neural signature for social alarm</title>
<p>In contrast to the broader arousal response elicited by anger, fearful faces recruited a circumscribed and right-lateralized network encompassing the superior temporal sulcus (STS), inferior occipital gyrus (IOG), and the amygdala. Fearful expressions selectively increased activation in the right STS and IOG relative to happy and neutral faces, and&#x2014;critically&#x2014;engaged the right STS and amygdala more strongly than anger.</p>
<p>This pattern suggests that fear processing extends beyond basic threat detection, engaging circuits specialized for decoding socially informative cues. The STS is a well-established hub for integrating dynamic facial features, biological motion, and gaze direction (<xref ref-type="bibr" rid="ref9">Deen et al., 2015</xref>; <xref ref-type="bibr" rid="ref17">Grosbras and Paus, 2006</xref>), all of which are essential for identifying both the source and direction of potential danger. Recent evidence further indicates that rapid visual pathways supporting fear detection may already encode high-level social information rather than merely low-level threat signals (e.g., <xref ref-type="bibr" rid="ref29">Lanzilotto et al., 2025</xref>). This interpretation aligns with contemporary work emphasizing that the amygdala contributes not only to vigilance but also to the evaluation of ambiguous or context-dependent social stimuli (<xref ref-type="bibr" rid="ref6">Davis and Whalen, 2001</xref>).</p>
<p>In essence, while anger tends to trigger a direct &#x201C;body alarm&#x201D; reflected in peripheral autonomic responses such as pupil dilation, fear preferentially engages a &#x201C;social-cognitive alarm&#x201D; that mobilizes the STS and amygdala to search for the source of threat in the environment.</p>
</sec>
</sec>
<sec id="sec22">
<label>5</label>
<title>Conclusion and synthesis</title>
<p>In summary, our multimodal approach dissociates the neural and psychophysiological pathways for processing angry and fearful faces. We demonstrate that anger is predominantly associated with a threat-triggered arousal response, indexed by pupil dilation, which reflects a global state of preparedness. In contrast, fear is characterized by the specific engagement of a right-lateralized network&#x2014;including the STS and amygdala&#x2014;specialized in processing social cues and environmental alarm. This &#x201C;anger dilates, fear engages&#x201D; dichotomy provides a parsimonious framework for understanding how the brain efficiently processes distinct negative emotional qualities to guide adaptive behavior. We particularly consider the role of a fast, subcortical pathway (involving the superior colliculus, pulvinar, and amygdala) in the rapid processing of fear. This &#x201C;low road&#x201D; provides a mechanistic foundation for the amygdala&#x2019;s rapid, automatic response to fearful faces, which then initiates a vigilant state and guides subsequent cortical analysis (<xref ref-type="bibr" rid="ref8">de Gelder et al., 2011</xref>). In this framework, the direct threat of anger may be less dependent on this rapid subcortical alert. Instead, anger processing might engage cortical pathways more directly from the outset, supporting the detailed appraisal of hostile intent and coordinating the broad, sustained cortical arousal reflected in the pupil dilation.</p>
<sec id="sec23">
<label>5.1</label>
<title>Limitations</title>
<p>The interpretability of our findings is subject to several design constraints. Conceptually, whether the higher perceptual load is due to the no-emotion-neutrality of faces is an interpretation of the pupillary modulation findings that needs to be verified. A replication of the combined high-frequency pupillometric and fMRI study using only neutral face stimuli, would serve this purpose. Methodologically, our strategic choice to optimize for robust subcortical and ventral temporal coverage resulted in a limited field of view (20 slices), potentially omitting activity in higher-order regions such as the prefrontal cortex. Additionally, the fixed block design, while powerful, precludes the disentanglement of transient neural responses from sustained emotional adaptation and may be susceptible to order effects. Leaving spatial frequencies natural preserves the ecological validity but might have confounded results. Future studies manipulating spatial frequencies, in particular in relation to the amygdala response, would help address this question. Finally, the use of a one-back cover task, though effective for controlling attention, may have inadvertently modulated emotional processing through its added cognitive load.</p>
</sec>
<sec id="sec24">
<label>5.2</label>
<title>Outlook</title>
<p>The distinct &#x201C;arousal-for-threat&#x201D; versus &#x201C;engagement-for-alarm&#x201D; model we propose provides a clear, testable framework for future research. Crucially, these findings underscore the necessity for replication in independent cohorts, particularly to confirm the robustness of the right STS in fear processing. Our study also highlights the advantage of a multimodal approach. Relying solely on fMRI might have led to the simplistic conclusion that fear is &#x201C;more processed&#x201D; than anger in temporal regions, whereas pupillometry alone would have suggested anger is the more potent stimulus. It was only by combining these measures that we could dissociate the broad, arousal-based impact of anger from the specific, socially-informative neural engagement elicited by fear. Future studies should leverage this multimodal strategy to investigate whether this dichotomy generalizes to other stimuli, such as dynamic faces or full-body expressions, and to explore its potential alterations in clinical populations with deficits in threat or social cue processing.</p>
</sec>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="sec25">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec sec-type="ethics-statement" id="sec26">
<title>Ethics statement</title>
<p>The study procedure involving humans conformed to the Declaration of Helsinki and was approved by the local ethics committee of the Medical Faculty of the University of Marburg (file ref. 39&#x2013;17 BO). The participants provided their written informed consent to participate in this study.</p>
</sec>
<sec sec-type="author-contributions" id="sec27">
<title>Author contributions</title>
<p>KW: Methodology, Formal analysis, Data curation, Supervision, Software, Conceptualization, Investigation, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing. RK: Writing &#x2013; original draft, Visualization, Formal analysis, Methodology, Data curation, Writing &#x2013; review &#x0026; editing, Validation, Investigation. KR: Writing &#x2013; original draft, Conceptualization, Supervision, Methodology, Writing &#x2013; review &#x0026; editing. JS: Software, Supervision, Writing &#x2013; review &#x0026; editing, Writing &#x2013; original draft. AJ: Methodology, Supervision, Writing &#x2013; original draft, Resources, Conceptualization, Project administration, Writing &#x2013; review &#x0026; editing.</p>
</sec>
<sec sec-type="COI-statement" id="sec28">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
<p>The author AJ declared that they were an editorial board member of Frontiers, at the time of submission. This had no impact on the peer review process and the final decision.</p>
</sec>
<sec sec-type="correction-note" id="sec029">
<title>Correction note</title>
<p>A correction has been made to this article. Details can be found at: <ext-link xlink:href="https://doi.org/10.3389/fnhum.2026.1804299" ext-link-type="uri">10.3389/fnhum.2026.1804299</ext-link>.</p>
</sec>
<sec sec-type="ai-statement" id="sec29">
<title>Generative AI statement</title>
<p>The author(s) declared that Generative AI was not used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec sec-type="disclaimer" id="sec30">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="ref1"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Benedek</surname><given-names>M.</given-names></name> <name><surname>Jauk</surname><given-names>E.</given-names></name> <name><surname>Beaty</surname><given-names>R.</given-names></name> <name><surname>Fink</surname><given-names>A.</given-names></name> <name><surname>Koschutnig</surname><given-names>K.</given-names></name> <name><surname>Neubauer</surname><given-names>A. C.</given-names></name></person-group> (<year>2016</year>). <article-title>Brain mechanisms associated with internally directed attention and self-generated thought</article-title>. <source>Sci. Rep.</source> <volume>6</volume>:<fpage>22959</fpage>. doi: <pub-id pub-id-type="doi">10.1038/srep22959</pub-id>, <pub-id pub-id-type="pmid">26960259</pub-id></mixed-citation></ref>
<ref id="ref2"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Blakemore</surname><given-names>S. J.</given-names></name></person-group> (<year>2008</year>). <article-title>The social brain in adolescence</article-title>. <source>Nat. Rev. Neurosci.</source> <volume>9</volume>, <fpage>267</fpage>&#x2013;<lpage>277</lpage>. doi: <pub-id pub-id-type="doi">10.1038/nrn2353</pub-id>, <pub-id pub-id-type="pmid">18354399</pub-id></mixed-citation></ref>
<ref id="ref3"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Bradley</surname><given-names>M. M.</given-names></name> <name><surname>Miccoli</surname><given-names>L.</given-names></name> <name><surname>Escrig</surname><given-names>M. A.</given-names></name> <name><surname>Lang</surname><given-names>P. J.</given-names></name></person-group> (<year>2008</year>). <article-title>The pupil as a measure of emotional arousal and autonomic activation</article-title>. <source>Psychophysiology</source> <volume>45</volume>, <fpage>602</fpage>&#x2013;<lpage>607</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1469-8986.2008.00654.x</pub-id>, <pub-id pub-id-type="pmid">18282202</pub-id></mixed-citation></ref>
<ref id="ref4"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Corbetta</surname><given-names>M.</given-names></name> <name><surname>Shulman</surname><given-names>G. L.</given-names></name></person-group> (<year>2002</year>). <article-title>Control of goal-directed and stimulus-driven attention in the brain</article-title>. <source>Nat. Rev. Neurosci.</source> <volume>3</volume>, <fpage>201</fpage>&#x2013;<lpage>215</lpage>. doi: <pub-id pub-id-type="doi">10.1038/nrn755</pub-id>, <pub-id pub-id-type="pmid">11994752</pub-id></mixed-citation></ref>
<ref id="ref5"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Davis</surname><given-names>F. C.</given-names></name> <name><surname>Somerville</surname><given-names>L. H.</given-names></name> <name><surname>Ruberry</surname><given-names>E. J.</given-names></name> <name><surname>Berry</surname><given-names>A. B.</given-names></name> <name><surname>Shin</surname><given-names>L. M.</given-names></name> <name><surname>Whalen</surname><given-names>P. J.</given-names></name></person-group> (<year>2011</year>). <article-title>A tale of two negatives: differential memory modulation by threat-related facial expressions</article-title>. <source>Emotion</source> <volume>11</volume>, <fpage>647</fpage>&#x2013;<lpage>655</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0021625</pub-id>, <pub-id pub-id-type="pmid">21668114</pub-id></mixed-citation></ref>
<ref id="ref6"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Davis</surname><given-names>M.</given-names></name> <name><surname>Whalen</surname><given-names>P. J.</given-names></name></person-group> (<year>2001</year>). <article-title>The amygdala: vigilance and emotion</article-title>. <source>Mol. Psychiatry</source> <volume>6</volume>, <fpage>13</fpage>&#x2013;<lpage>34</lpage>. doi: <pub-id pub-id-type="doi">10.1038/sj.mp.4000812</pub-id>, <pub-id pub-id-type="pmid">11244481</pub-id></mixed-citation></ref>
<ref id="ref7"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>de Gelder</surname><given-names>B.</given-names></name></person-group> (<year>2006</year>). <article-title>Towards the neurobiology of emotional body language</article-title>. <source>Nat. Rev. Neurosci.</source> <volume>7</volume>, <fpage>242</fpage>&#x2013;<lpage>249</lpage>. doi: <pub-id pub-id-type="doi">10.1038/nrn1872</pub-id>, <pub-id pub-id-type="pmid">16495945</pub-id></mixed-citation></ref>
<ref id="ref8"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>de Gelder</surname><given-names>B.</given-names></name> <name><surname>van Honk</surname><given-names>J.</given-names></name> <name><surname>Tamietto</surname><given-names>M.</given-names></name></person-group> (<year>2011</year>). <article-title>Emotion in the brain: of low roads, high roads and roads less travelled</article-title>. <source>Nat. Rev. Neurosci.</source> <volume>12</volume>:<fpage>425</fpage>. doi: <pub-id pub-id-type="doi">10.1038/nrn2920-c1</pub-id>, <pub-id pub-id-type="pmid">21673722</pub-id></mixed-citation></ref>
<ref id="ref9"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Deen</surname><given-names>B.</given-names></name> <name><surname>Koldewyn</surname><given-names>K.</given-names></name> <name><surname>Kanwisher</surname><given-names>N.</given-names></name> <name><surname>Saxe</surname><given-names>R.</given-names></name></person-group> (<year>2015</year>). <article-title>Functional organization of social perception and cognition in the superior temporal sulcus</article-title>. <source>Cereb. Cortex</source> <volume>25</volume>, <fpage>4596</fpage>&#x2013;<lpage>4609</lpage>. doi: <pub-id pub-id-type="doi">10.1093/cercor/bhv111</pub-id>, <pub-id pub-id-type="pmid">26048954</pub-id></mixed-citation></ref>
<ref id="ref10"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ekman</surname><given-names>P.</given-names></name></person-group> (<year>1992</year>). <article-title>An argument for basic emotions</article-title>. <source>Cogn. Emot.</source> <volume>6</volume>, <fpage>169</fpage>&#x2013;<lpage>200</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699939208411068</pub-id></mixed-citation></ref>
<ref id="ref11"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Enzi</surname><given-names>B.</given-names></name> <name><surname>Amirie</surname><given-names>S.</given-names></name> <name><surname>Br&#x00FC;ne</surname><given-names>M.</given-names></name></person-group> (<year>2016</year>). <article-title>Empathy for pain-related dorsolateral prefrontal activity is modulated by angry face perception</article-title>. <source>Exp. Brain Res.</source> <volume>234</volume>, <fpage>3335</fpage>&#x2013;<lpage>3345</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00221-016-4731-4</pub-id>, <pub-id pub-id-type="pmid">27447790</pub-id></mixed-citation></ref>
<ref id="ref12"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Fairhall</surname><given-names>S. L.</given-names></name> <name><surname>Ishai</surname><given-names>A.</given-names></name></person-group> (<year>2007</year>). <article-title>Effective connectivity within the distributed cortical network for face perception</article-title>. <source>Cereb. Cortex</source> <volume>17</volume>, <fpage>2400</fpage>&#x2013;<lpage>2406</lpage>. doi: <pub-id pub-id-type="doi">10.1093/cercor/bhl148</pub-id>, <pub-id pub-id-type="pmid">17190969</pub-id></mixed-citation></ref>
<ref id="ref13"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Fr&#x00E4;ssle</surname><given-names>S.</given-names></name> <name><surname>Paulus</surname><given-names>F. M.</given-names></name> <name><surname>Krach</surname><given-names>S.</given-names></name> <name><surname>Schweinberger</surname><given-names>S. R.</given-names></name> <name><surname>Stephan</surname><given-names>K. E.</given-names></name> <name><surname>Jansen</surname><given-names>A.</given-names></name></person-group> (<year>2016</year>). <article-title>Mechanisms of hemispheric lateralization: asymmetric interhemispheric recruitment in the face perception network</article-title>. <source>NeuroImage</source> <volume>124</volume>, <fpage>977</fpage>&#x2013;<lpage>988</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2015.09.055</pub-id>, <pub-id pub-id-type="pmid">26439515</pub-id></mixed-citation></ref>
<ref id="ref14"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Fusar-Poli</surname><given-names>P.</given-names></name> <name><surname>Placentino</surname><given-names>A.</given-names></name> <name><surname>Carletti</surname><given-names>F.</given-names></name> <name><surname>Allen</surname><given-names>P.</given-names></name> <name><surname>Landi</surname><given-names>P.</given-names></name> <name><surname>Abbamonte</surname><given-names>M.</given-names></name> <etal/></person-group>. (<year>2009a</year>). <article-title>Laterality effect on emotional faces processing: ALE meta-analysis of evidence</article-title>. <source>Neurosci. Lett.</source> <volume>452</volume>, <fpage>262</fpage>&#x2013;<lpage>267</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neulet.2009.01.065</pub-id>, <pub-id pub-id-type="pmid">19348735</pub-id></mixed-citation></ref>
<ref id="ref15"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Fusar-Poli</surname><given-names>P.</given-names></name> <name><surname>Placentino</surname><given-names>A.</given-names></name> <name><surname>Carletti</surname><given-names>F.</given-names></name> <name><surname>Landi</surname><given-names>P.</given-names></name> <name><surname>Allen</surname><given-names>P.</given-names></name> <name><surname>Surguladze</surname><given-names>S.</given-names></name> <etal/></person-group>. (<year>2009b</year>). <article-title>Functional atlas of emotional faces processing: a voxel-based meta-analysis of 105 functional magnetic resonance imaging studies</article-title>. <source>J. Psychiatry Neurosci.</source> <volume>34</volume>, <fpage>418</fpage>&#x2013;<lpage>432</lpage>. doi: <pub-id pub-id-type="doi">10.1139/jpn.0953</pub-id></mixed-citation></ref>
<ref id="ref16"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Grosbras</surname><given-names>M. H.</given-names></name> <name><surname>Beaton</surname><given-names>S.</given-names></name> <name><surname>Eickhoff</surname><given-names>S. B.</given-names></name></person-group> (<year>2012</year>). <article-title>Brain regions involved in human movement perception: a quantitative voxel-based meta-analysis</article-title>. <source>Hum. Brain Mapp.</source> <volume>33</volume>, <fpage>431</fpage>&#x2013;<lpage>454</lpage>. doi: <pub-id pub-id-type="doi">10.1002/hbm.21222</pub-id>, <pub-id pub-id-type="pmid">21391275</pub-id></mixed-citation></ref>
<ref id="ref17"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Grosbras</surname><given-names>M. H.</given-names></name> <name><surname>Paus</surname><given-names>T.</given-names></name></person-group> (<year>2006</year>). <article-title>Brain networks involved in viewing angry hands or faces</article-title>. <source>Cereb. Cortex</source> <volume>16</volume>, <fpage>1087</fpage>&#x2013;<lpage>1096</lpage>. doi: <pub-id pub-id-type="doi">10.1093/cercor/bhj050</pub-id>, <pub-id pub-id-type="pmid">16221928</pub-id></mixed-citation></ref>
<ref id="ref18"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Han</surname><given-names>S.</given-names></name> <name><surname>Jiang</surname><given-names>Y.</given-names></name> <name><surname>Mao</surname><given-names>L.</given-names></name> <name><surname>Humphreys</surname><given-names>G. W.</given-names></name> <name><surname>Gu</surname><given-names>H.</given-names></name></person-group> (<year>2005</year>). <article-title>Attentional modulation of perceptual grouping in human visual cortex: functional MRI studies</article-title>. <source>Hum. Brain Mapp.</source> <volume>25</volume>, <fpage>424</fpage>&#x2013;<lpage>432</lpage>. doi: <pub-id pub-id-type="doi">10.1002/hbm.20119</pub-id>, <pub-id pub-id-type="pmid">15852379</pub-id></mixed-citation></ref>
<ref id="ref19"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Haxby</surname><given-names>J. V.</given-names></name> <name><surname>Hoffman</surname><given-names>E. A.</given-names></name> <name><surname>Gobbini</surname><given-names>M. I.</given-names></name></person-group> (<year>2000</year>). <article-title>The distributed human neural system for face perception</article-title>. <source>Trends Cogn. Sci.</source> <volume>4</volume>, <fpage>223</fpage>&#x2013;<lpage>233</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S1364-6613(00)01482-0</pub-id>, <pub-id pub-id-type="pmid">10827445</pub-id></mixed-citation></ref>
<ref id="ref20"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Haxby</surname><given-names>J. V.</given-names></name> <name><surname>Hoffman</surname><given-names>E. A.</given-names></name> <name><surname>Gobbini</surname><given-names>M. I.</given-names></name></person-group> (<year>2002</year>). <article-title>Human neural systems for face recognition and social communication</article-title>. <source>Biol. Psychiatry</source> <volume>51</volume>, <fpage>59</fpage>&#x2013;<lpage>67</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S0006-3223(01)01330-0</pub-id>, <pub-id pub-id-type="pmid">11801231</pub-id></mixed-citation></ref>
<ref id="ref21"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Honma</surname><given-names>M.</given-names></name> <name><surname>Tanaka</surname><given-names>Y.</given-names></name> <name><surname>Osada</surname><given-names>Y.</given-names></name> <name><surname>Kuriyama</surname><given-names>K.</given-names></name></person-group> (<year>2012</year>). <article-title>Perceptual&#x2014;and not physical&#x2014;eye contact elicits pupillary dilation</article-title>. <source>Biol. Psychol.</source> <volume>89</volume>, <fpage>112</fpage>&#x2013;<lpage>116</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.biopsycho.2011.09.015</pub-id>, <pub-id pub-id-type="pmid">21982748</pub-id></mixed-citation></ref>
<ref id="ref22"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kahneman</surname><given-names>D.</given-names></name> <name><surname>Beatty</surname><given-names>J.</given-names></name></person-group> (<year>1966</year>). <article-title>Pupil diameter and load on memory</article-title>. <source>Science</source> <volume>154</volume>, <fpage>1583</fpage>&#x2013;<lpage>1585</lpage>. doi: <pub-id pub-id-type="doi">10.1126/science.154.3756.1583</pub-id>, <pub-id pub-id-type="pmid">5924930</pub-id></mixed-citation></ref>
<ref id="ref23"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kahneman</surname><given-names>D.</given-names></name> <name><surname>Wright</surname><given-names>P.</given-names></name></person-group> (<year>1971</year>). <article-title>Changes of pupil size and rehearsal strategies in a short-term memory task</article-title>. <source>Q. J. Exp. Psychol.</source> <volume>23</volume>, <fpage>187</fpage>&#x2013;<lpage>196</lpage>. doi: <pub-id pub-id-type="doi">10.1080/14640747108400239</pub-id>, <pub-id pub-id-type="pmid">5559707</pub-id></mixed-citation></ref>
<ref id="ref9001"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kahneman</surname><given-names>D.</given-names></name> <name><surname>Tursky</surname><given-names>B.</given-names></name> <name><surname>Shapiro</surname><given-names>D.</given-names></name> <name><surname>Crider</surname><given-names>A.</given-names></name></person-group> (<year>1969</year>). <article-title>Pupillary, heart rate, and skin resistance changes during a mental task</article-title>. <source>J. Exp. Psychol.</source> <volume>79</volume>, <fpage>164</fpage>&#x2013;<lpage>167</lpage>. doi: <pub-id pub-id-type="doi">10.1037/h0026952</pub-id></mixed-citation></ref>
<ref id="ref24"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kessler</surname><given-names>R.</given-names></name> <name><surname>Rusch</surname><given-names>K. M.</given-names></name> <name><surname>Wende</surname><given-names>K. C.</given-names></name> <name><surname>Schuster</surname><given-names>V.</given-names></name> <name><surname>Jansen</surname><given-names>A.</given-names></name></person-group> (<year>2021</year>). <article-title>Revisiting the effective connectivity within the distributed cortical network for face perception</article-title>. <source>NeuroImage</source> <volume>1</volume>:<fpage>100045</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.ynirp.2021.100045</pub-id>, <pub-id pub-id-type="pmid">40568430</pub-id></mixed-citation></ref>
<ref id="ref25"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Klein</surname><given-names>F.</given-names></name> <name><surname>Iffland</surname><given-names>B.</given-names></name> <name><surname>Schindler</surname><given-names>S.</given-names></name> <name><surname>Wabnitz</surname><given-names>P.</given-names></name> <name><surname>Neuner</surname><given-names>F.</given-names></name></person-group> (<year>2015</year>). <article-title>This person is saying bad things about you: the influence of physically and socially threatening context information on the processing of inherently neutral faces</article-title>. <source>Cogn. Affect. Behav. Neurosci.</source> <volume>15</volume>, <fpage>736</fpage>&#x2013;<lpage>748</lpage>. doi: <pub-id pub-id-type="doi">10.3758/s13415-015-0361-8</pub-id>, <pub-id pub-id-type="pmid">25967930</pub-id></mixed-citation></ref>
<ref id="ref26"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Klein</surname><given-names>I.</given-names></name> <name><surname>Paradis</surname><given-names>A. L.</given-names></name> <name><surname>Poline</surname><given-names>J. B.</given-names></name> <name><surname>Kosslyn</surname><given-names>S. M.</given-names></name> <name><surname>Le Bihan</surname><given-names>D.</given-names></name></person-group> (<year>2000</year>). <article-title>Transient activity in the human calcarine cortex during visual-mental imagery: an event-related fMRI study</article-title>. <source>J. Cogn. Neurosci.</source> <volume>12</volume>, <fpage>15</fpage>&#x2013;<lpage>23</lpage>. doi: <pub-id pub-id-type="doi">10.1162/089892900564037</pub-id>, <pub-id pub-id-type="pmid">11506644</pub-id></mixed-citation></ref>
<ref id="ref27"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kret</surname><given-names>M. E.</given-names></name> <name><surname>Stekelenburg</surname><given-names>J. J.</given-names></name> <name><surname>Roelofs</surname><given-names>K.</given-names></name> <name><surname>de Gelder</surname><given-names>B.</given-names></name></person-group> (<year>2013</year>). <article-title>Perception of face and body expressions using electromyography, pupillometry and gaze measures</article-title>. <source>Front. Psychol.</source> <volume>4</volume>:<fpage>28</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2013.00028</pub-id>, <pub-id pub-id-type="pmid">23403886</pub-id></mixed-citation></ref>
<ref id="ref28"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Langner</surname><given-names>O.</given-names></name> <name><surname>Dotsch</surname><given-names>R.</given-names></name> <name><surname>Bijlstra</surname><given-names>G.</given-names></name> <name><surname>Wigboldus</surname><given-names>D. H. J.</given-names></name> <name><surname>Hawk</surname><given-names>S. T.</given-names></name> <name><surname>van Knippenberg</surname><given-names>A.</given-names></name></person-group> (<year>2010</year>). <article-title>Presentation and validation of the Radboud faces database</article-title>. <source>Cogn. Emot.</source> <volume>24</volume>, <fpage>1377</fpage>&#x2013;<lpage>1388</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699930903485076</pub-id></mixed-citation></ref>
<ref id="ref29"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Lanzilotto</surname><given-names>M.</given-names></name> <name><surname>Dal Monte</surname><given-names>O.</given-names></name> <name><surname>Diano</surname><given-names>M.</given-names></name> <name><surname>Panormita</surname><given-names>M.</given-names></name> <name><surname>Battaglia</surname><given-names>S.</given-names></name> <name><surname>Celeghin</surname><given-names>A.</given-names></name> <etal/></person-group>. (<year>2025</year>). <article-title>Learning to fear novel stimuli by observing others in the social affordance framework</article-title>. <source>Neurosci. Biobehav. Rev.</source> <volume>169</volume>:<fpage>106006</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neubiorev.2025.106006</pub-id>, <pub-id pub-id-type="pmid">39788170</pub-id></mixed-citation></ref>
<ref id="ref9002"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Murphy</surname><given-names>P. R.</given-names></name> <name><surname>Robertson</surname><given-names>I. H.</given-names></name> <name><surname>Balsters</surname><given-names>J. H.</given-names></name> <name><surname>O&#x2019;Connell</surname><given-names>R. G.</given-names></name></person-group> (<year>2011</year>). <article-title>Pupillometry and P3 index the locus coeruleus-noradrenergic arousal function in humans</article-title>. <source>Psychophysiology</source> <volume>48</volume>, <fpage>1532</fpage>&#x2013;<lpage>1543</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1469-8986.2011.01226.x</pub-id></mixed-citation></ref>
<ref id="ref30"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Nagy</surname><given-names>E.</given-names></name> <name><surname>Prentice</surname><given-names>L.</given-names></name> <name><surname>Wakeling</surname><given-names>T.</given-names></name></person-group> (<year>2021</year>). <article-title>Atypical facial emotion recognition in children with ASD: exploratory analysis on task demands</article-title>. <source>Perception</source> <volume>50</volume>, <fpage>819</fpage>&#x2013;<lpage>833</lpage>. doi: <pub-id pub-id-type="doi">10.1177/03010066211038154</pub-id>, <pub-id pub-id-type="pmid">34428977</pub-id></mixed-citation></ref>
<ref id="ref31"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Neumann</surname><given-names>M. F.</given-names></name> <name><surname>Mohamed</surname><given-names>T. N.</given-names></name> <name><surname>Schweinberger</surname><given-names>S. R.</given-names></name></person-group> (<year>2011</year>). <article-title>Face and object encoding under perceptual load: ERP evidence</article-title>. <source>NeuroImage</source> <volume>54</volume>, <fpage>3021</fpage>&#x2013;<lpage>3027</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2010.10.075</pub-id>, <pub-id pub-id-type="pmid">21044688</pub-id></mixed-citation></ref>
<ref id="ref9003"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Oldfield</surname><given-names>R. C.</given-names></name></person-group> (<year>1971</year>). <article-title>The assessment and analysis of handedness: the Edinburgh inventory</article-title>. <source>Neuropsychologia</source> <volume>9</volume>, <fpage>97</fpage>&#x2013;<lpage>113</lpage>. doi: <pub-id pub-id-type="doi">10.1016/0028-3932(71)90067-4</pub-id></mixed-citation></ref>
<ref id="ref32"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Par&#x00E9;</surname><given-names>S.</given-names></name> <name><surname>Bleau</surname><given-names>M.</given-names></name> <name><surname>Dricot</surname><given-names>L.</given-names></name> <name><surname>Ptito</surname><given-names>M.</given-names></name> <name><surname>Kupers</surname><given-names>R.</given-names></name></person-group> (<year>2023</year>). <article-title>Brain structural changes in blindness: a systematic review and an anatomical likelihood estimation (ALE) meta-analysis</article-title>. <source>Neurosci. Biobehav. Rev.</source> <volume>150</volume>:<fpage>105165</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neubiorev.2023.105165</pub-id>, <pub-id pub-id-type="pmid">37054803</pub-id></mixed-citation></ref>
<ref id="ref9004"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Puffet</surname><given-names>A. S.</given-names></name> <name><surname>Rigoulot</surname><given-names>S.</given-names></name></person-group> (<year>2025</year>). <article-title>The role of cognitive load in automatic integration of emotional information from face and body</article-title>. <source>Sci. Rep.</source> <volume>15</volume>:<fpage>28184</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-025-12511-8</pub-id></mixed-citation></ref>
<ref id="ref9005"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Rossion</surname><given-names>B.</given-names></name></person-group> (<year>2008</year>). <article-title>Constraining the cortical face network by neuroimaging studies of acquired prosopagnosia</article-title>. <source>Neuroimage</source> <volume>40</volume>, <fpage>423</fpage>&#x2013;<lpage>426</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2007.10.047</pub-id></mixed-citation></ref>
<ref id="ref33"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Tamietto</surname><given-names>M.</given-names></name> <name><surname>Castelli</surname><given-names>L.</given-names></name> <name><surname>Vighetti</surname><given-names>S.</given-names></name> <name><surname>Perozzo</surname><given-names>P.</given-names></name> <name><surname>Geminiani</surname><given-names>G.</given-names></name> <name><surname>Weiskrantz</surname><given-names>L.</given-names></name> <etal/></person-group>. (<year>2009</year>). <article-title>Unseen facial and bodily expressions trigger fast emotional reactions</article-title>. <source>Proc. Natl. Acad. Sci.</source> <volume>106</volume>, <fpage>17661</fpage>&#x2013;<lpage>17666</lpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.0908994106</pub-id>, <pub-id pub-id-type="pmid">19805044</pub-id></mixed-citation></ref>
<ref id="ref34"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Uljarevic</surname><given-names>M.</given-names></name> <name><surname>Hamilton</surname><given-names>A.</given-names></name></person-group> (<year>2013</year>). <article-title>Recognition of emotions in autism: a meta-analysis</article-title>. <source>J. Autism Dev. Disord.</source> <volume>43</volume>, <fpage>1517</fpage>&#x2013;<lpage>1526</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s10803-012-1695-5</pub-id>, <pub-id pub-id-type="pmid">23114566</pub-id></mixed-citation></ref>
<ref id="ref35"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Vinck</surname><given-names>M.</given-names></name> <name><surname>Batista-Brito</surname><given-names>R.</given-names></name> <name><surname>Knoblich</surname><given-names>U.</given-names></name> <name><surname>Cardin</surname><given-names>J. A.</given-names></name></person-group> (<year>2015</year>). <article-title>Arousal and locomotion make distinct contributions to cortical activity patterns and visual encoding</article-title>. <source>Neuron</source> <volume>86</volume>, <fpage>740</fpage>&#x2013;<lpage>754</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuron.2015.03.028</pub-id>, <pub-id pub-id-type="pmid">25892300</pub-id></mixed-citation></ref>
<ref id="ref9006"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Vuilleumier</surname><given-names>P.</given-names></name> <name><surname>Armony</surname><given-names>J. L.</given-names></name> <name><surname>Driver</surname><given-names>J.</given-names></name> <name><surname>Dolan</surname><given-names>R. J.</given-names></name></person-group> (<year>2003</year>). <article-title>Distinct spatial frequency sensitivities for processing faces and emotional expressions</article-title>. <source>Nat. Neurosci.</source> <volume>6</volume>, <fpage>624</fpage>&#x2013;<lpage>631</lpage>. doi: <pub-id pub-id-type="doi">10.1038/nn1057</pub-id></mixed-citation></ref>
<ref id="ref36"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Vytal</surname><given-names>K.</given-names></name> <name><surname>Hamann</surname><given-names>S.</given-names></name></person-group> (<year>2010</year>). <article-title>Neuroimaging support for discrete neural correlates of basic emotions: a meta-analysis</article-title>. <source>J. Cogn. Neurosci.</source> <volume>22</volume>, <fpage>2864</fpage>&#x2013;<lpage>2885</lpage>. doi: <pub-id pub-id-type="doi">10.1162/jocn.2009.21366</pub-id></mixed-citation></ref>
<ref id="ref37"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Watson</surname><given-names>R.</given-names></name> <name><surname>Huis in 't Veld</surname><given-names>E. M.</given-names></name> <name><surname>de Gelder</surname><given-names>B.</given-names></name></person-group> (<year>2016</year>). <article-title>The neural basis of individual face and object perception</article-title>. <source>Front. Hum. Neurosci.</source> <volume>10</volume>:<fpage>66</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnhum.2016.00066</pub-id></mixed-citation></ref>
<ref id="ref38"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Wierda</surname><given-names>S. M.</given-names></name> <name><surname>van Rijn</surname><given-names>H.</given-names></name> <name><surname>Taatgen</surname><given-names>N. A.</given-names></name> <name><surname>Martens</surname><given-names>S.</given-names></name></person-group> (<year>2012</year>). <article-title>Pupil dilation deconvolution reveals the dynamics of attention at high temporal resolution</article-title>. <source>Proc. Natl. Acad. Sci.</source> <volume>109</volume>, <fpage>8456</fpage>&#x2013;<lpage>8460</lpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.1201858109</pub-id>, <pub-id pub-id-type="pmid">22586101</pub-id></mixed-citation></ref>
<ref id="ref9007"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Willenbockel</surname><given-names>V.</given-names></name> <name><surname>Sadr</surname><given-names>J.</given-names></name> <name><surname>Fiset</surname><given-names>D.</given-names></name> <name><surname>Horne</surname><given-names>G. O.</given-names></name> <name><surname>Gosselin</surname><given-names>F.</given-names></name> <name><surname>Tanaka</surname><given-names>J. W.</given-names></name></person-group> (<year>2010</year>). <article-title>Controlling low-level image properties: The SHINE toolbox</article-title>. <source>Behav. Res. Methods</source> <volume>42</volume>, <fpage>671</fpage>&#x2013;<lpage>684</lpage>. doi: <pub-id pub-id-type="doi">10.3758/BRM.42.3.671</pub-id></mixed-citation></ref>
<ref id="ref39"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Zinchenko</surname><given-names>O.</given-names></name> <name><surname>Yaple</surname><given-names>Z. A.</given-names></name> <name><surname>Arsalidou</surname><given-names>M.</given-names></name></person-group> (<year>2018</year>). <article-title>Brain responses to dynamic facial expressions: a normative Meta-analysis</article-title>. <source>Front. Hum. Neurosci.</source> <volume>12</volume>:<fpage>227</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnhum.2018.00227</pub-id>, <pub-id pub-id-type="pmid">29922137</pub-id></mixed-citation></ref>
</ref-list>
<fn-group>
<fn fn-type="custom" custom-type="edited-by" id="fn0001">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1662141/overview">Matteo Toscani</ext-link>, Bournemouth University, United Kingdom</p>
</fn>
<fn fn-type="custom" custom-type="reviewed-by" id="fn0002">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2220908/overview">Maria-Chiara Villa</ext-link>, University of Turin, Italy</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2928244/overview">Gantian Huang</ext-link>, Sichuan University, China</p>
</fn>
</fn-group>
</back>
</article>