<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Psychol.</journal-id>
<journal-title>Frontiers in Psychology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Psychol.</abbrev-journal-title>
<issn pub-type="epub">1664-1078</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpsyg.2024.1379652</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Psychology</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Synchrony or asynchrony: development of facial expression recognition from childhood to adolescence based on large-scale evidence</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" equal-contrib="yes">
<name><surname>Wang</surname> <given-names>Yihan</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="author-notes" rid="fn0001"><sup>&#x2020;</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author" equal-contrib="yes">
<name><surname>Luo</surname> <given-names>Qian</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="author-notes" rid="fn0001"><sup>&#x2020;</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Zhang</surname> <given-names>Yuanmeng</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Zhao</surname> <given-names>Ke</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/247442/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>State Key Laboratory of Brain and Cognitive Science, Institute of Psychology, Chinese Academy of Sciences</institution>, <addr-line>Beijing</addr-line>, <country>China</country></aff>
<aff id="aff2"><sup>2</sup><institution>Department of Psychology, University of Chinese Academy of Sciences</institution>, <addr-line>Beijing</addr-line>, <country>China</country></aff>
<aff id="aff3"><sup>3</sup><institution>College of Letters and Science, University of California, Berkeley</institution>, <addr-line>Berkeley, CA</addr-line>, <country>United States</country></aff>
<author-notes>
<fn fn-type="edited-by" id="fn0002"><p>Edited by: Wenfeng Chen, Renmin University of China, China</p>
</fn>
<fn fn-type="edited-by" id="fn0003"><p>Reviewed by: Jie Zhang, University College London, United Kingdom</p>
<p>Weiwei Peng, Shenzhen University, China</p>
</fn>
<corresp id="c001">&#x002A;Correspondence: Ke Zhao, <email>zhaok@psych.ac.cn</email></corresp>
<fn fn-type="equal" id="fn0001"><p><sup>&#x2020;</sup>These authors have contributed equally to this work</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>25</day>
<month>04</month>
<year>2024</year>
</pub-date>
<pub-date pub-type="collection">
<year>2024</year>
</pub-date>
<volume>15</volume>
<elocation-id>1379652</elocation-id>
<history>
<date date-type="received">
<day>31</day>
<month>01</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>09</day>
<month>04</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2024 Wang, Luo, Zhang and Zhao.</copyright-statement>
<copyright-year>2024</copyright-year>
<copyright-holder>Wang, Luo, Zhang and Zhao</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>The development of facial expression recognition ability in children is crucial for their emotional cognition and social interactions. In this study, 510 children aged between 6 and 15 participated in a two forced-choice task of facial expression recognition. The findings supported that recognition of the six basic facial expressions reached a relatively stable mature level around 8&#x2013;9&#x2009;years old. Additionally, model fitting results indicated that children showed the most significant improvement in recognizing expressions of disgust, closely followed by fear. Conversely, recognition of expressions of happiness and sadness showed slower improvement across different age groups. Regarding gender differences, girls exhibited a more pronounced advantage. Further model fitting revealed that boys showed more pronounced improvements in recognizing expressions of disgust, fear, and anger, while girls showed more pronounced improvements in recognizing expressions of surprise, sadness, and happiness. These clear findings suggested the synchronous developmental trajectory of facial expression recognition from childhood to adolescence, likely influenced by socialization processes and interactions related to brain maturation.</p>
</abstract>
<kwd-group>
<kwd>children</kwd>
<kwd>facial expression</kwd>
<kwd>emotion recognition</kwd>
<kwd>gender difference</kwd>
<kwd>age</kwd>
</kwd-group>
<counts>
<fig-count count="5"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="57"/>
<page-count count="9"/>
<word-count count="6890"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Emotion Science</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="sec1">
<label>1</label>
<title>Introduction</title>
<p>Recognizing facial expressions is crucial for social interactions because they offer valuable insights into individuals&#x2019; internal emotions (<xref ref-type="bibr" rid="ref22">Herba and Phillips, 2004</xref>; <xref ref-type="bibr" rid="ref52">Watling et al., 2012</xref>; <xref ref-type="bibr" rid="ref30">Liu et al., 2021</xref>). Via facial expressions, individuals can gather abundant information to assess others&#x2019; emotions and generate appropriate responses (<xref ref-type="bibr" rid="ref11">Cunningham and Odom, 1986</xref>; <xref ref-type="bibr" rid="ref15">Gao and Maurer, 2009</xref>; <xref ref-type="bibr" rid="ref24">Johnston et al., 2011</xref>). The ability to recognize facial expressions starts to develop in infancy, as shown by research indicating that 2-month-old infants display sustained attention towards their mother&#x2019;s happy expressions (<xref ref-type="bibr" rid="ref42">Rochat et al., 2002</xref>). The developmental trajectory of children&#x2019;s ability to recognize facial expressions serves as a critical measure for evaluating their emotional cognitive development (<xref ref-type="bibr" rid="ref39">Ogren and Johnson, 2021</xref>). Facial expression recognition also plays a pivotal role in social interactions (<xref ref-type="bibr" rid="ref4">Bayet and Nelson, 2019</xref>), thus contributing to enhancing children&#x2019;s social adaptations (<xref ref-type="bibr" rid="ref39">Ogren and Johnson, 2021</xref>).</p>
<p>Considering the critical role of facial expression recognition in children&#x2019;s social interactions and emotional development, comprehending the developmental trajectory of facial expressions from childhood to adolescence is of paramount importance. Nevertheless, this field currently faces a shortage of research, exacerbated by relatively small sample sizes (<xref ref-type="bibr" rid="ref15">Gao and Maurer, 2009</xref>; <xref ref-type="bibr" rid="ref43">Rodger et al., 2015</xref>). Consequently, there are unresolved issues in facial expression recognition that require clear clarification. A primary concern is to ascertain whether there are distinct developmental stages or pivotal moments in the progression of facial expression recognition ability from childhood to adolescence. Several studies have documented notable enhancements in children&#x2019;s recognition accuracy between the ages of 3 and 7, followed by a subsequent increase in recognition speed from 7 to 10&#x2009;years old (<xref ref-type="bibr" rid="ref48">Sonneville et al., 2002</xref>; <xref ref-type="bibr" rid="ref12">Durand et al., 2007</xref>; <xref ref-type="bibr" rid="ref53">Widen and Russell, 2008</xref>). Another study identified two primary stages in the developmental continuum of facial expression recognition: spanning from ages 5 to 12 and extending from adolescence into adulthood (<xref ref-type="bibr" rid="ref43">Rodger et al., 2015</xref>). However, some studies have challenged this categorization, showing that 10-year-old children achieve nearly identical accuracy scores as 16-year-old adolescents in complex emotion recognition tasks (<xref ref-type="bibr" rid="ref37">Naruse et al., 2013</xref>; <xref ref-type="bibr" rid="ref28">Lawrence et al., 2015</xref>). A recent study additionally found that 8-year-olds surpassed 5-year-olds and performed equally to older adults (<xref ref-type="bibr" rid="ref46">Ruffman et al., 2023</xref>). 
Thus, researchers in this field have not yet reached a consensus on the developmental stages of recognition ability. One plausible explanation for this inconsistency is the intricate nature of diverse emotions, which complicates the developmental process (<xref ref-type="bibr" rid="ref24">Johnston et al., 2011</xref>; <xref ref-type="bibr" rid="ref28">Lawrence et al., 2015</xref>; <xref ref-type="bibr" rid="ref43">Rodger et al., 2015</xref>; <xref ref-type="bibr" rid="ref50">Vesker et al., 2018</xref>; <xref ref-type="bibr" rid="ref31">Louisa et al., 2019</xref>). Therefore, a larger sample size is necessary to study the synchronous characteristics of facial expression recognition development from childhood to adolescence.</p>
<p>Another concern in facial expression recognition development is gender differences. A meta-analysis has examined differences in decoding non-verbal emotional signals, including facial expressions, vocal prosody, postures, and gestures, between genders (<xref ref-type="bibr" rid="ref19">Hall, 1978</xref>). The findings revealed a trend where female participants consistently outperformed their male counterparts in identifying and interpreting non-verbal cues. Furthermore, another meta-analysis supported a slight, yet robust, advantage for females in facial expression recognition from infancy through adolescence (<xref ref-type="bibr" rid="ref34">McClure, 2000</xref>). The superiority of female participants in processing facial expressions of emotion can be explained by anatomical differences, varying rates of maturation of neurological structures responsible for emotion processing, and differences in social experiences (<xref ref-type="bibr" rid="ref5">Bourne, 2005</xref>). While gender differences in emotion processing among children have not been consistently observed (<xref ref-type="bibr" rid="ref18">Gross and Ballif, 1991</xref>), females, on average, show greater proficiency in understanding the emotional disposition of both genders (<xref ref-type="bibr" rid="ref26">Kiecolt-Glaser and Newton, 2001</xref>; <xref ref-type="bibr" rid="ref44">Rosip and Hall, 2004</xref>; <xref ref-type="bibr" rid="ref38">Neff and Karney, 2005</xref>). However, conflicting findings regarding the influence of gender on children&#x2019;s expression recognition persist. Some studies have suggested that preschool and school-age girls demonstrate slight yet consistent advantages in emotion recognition. 
This accelerated development in girls&#x2019; initial expression recognition ability may be attributed to their early exposure to a more expressive environment since infancy (<xref ref-type="bibr" rid="ref32">Mancini et al., 2013</xref>; <xref ref-type="bibr" rid="ref9">Cameron et al., 2018</xref>). Conversely, another study utilizing matching paradigms found no gender differences in emotion processing among children (<xref ref-type="bibr" rid="ref21">Herba et al., 2006</xref>). The gender disparities in facial expression recognition between boys and girls from childhood to adolescence call for evidence from substantial samples of continuously age-staged data.</p>
<p>As discussed above, the developmental trajectory of facial expression recognition ability from childhood to adolescence remains unclear. Furthermore, there is a lack of consensus regarding the developmental processes of various emotions and the influence of gender differences on facial expression recognition. To tackle these challenges, the current study utilized a large sample and employed two forced-choice rapid facial expression recognition paradigms (<xref ref-type="bibr" rid="ref57">Zhao et al., 2017</xref>, <xref ref-type="bibr" rid="ref56">2020</xref>), systematically assessing the developmental characteristics of children aged 6 to 15 in recognizing six basic facial expressions (happiness, disgust, anger, fear, sadness, and surprise). The two forced-choice paradigms offer distinct advantages in studying facial expressions, providing a clear and direct assessment of emotional responses. By presenting participants with two options representing different emotions, researchers were able to precisely measure and compare participants&#x2019; facial expression responses to each option. This approach minimizes ambiguity and subjectivity in facial expression assessment, establishing a structured framework for participants to make choices that are easily quantifiable and analyzable. Moreover, this method facilitates comparisons between different emotions, enabling researchers to investigate the relative strength or preference for specific emotional expressions. Overall, the two forced-choice paradigm offers a systematic and controlled approach to studying facial expressions in children, resulting in more precise and reliable measurements of emotional responses.</p>
</sec>
<sec sec-type="methods" id="sec2">
<label>2</label>
<title>Methods</title>
<sec id="sec3">
<label>2.1</label>
<title>Participants</title>
<p>In this study, 510 children (273 boys and 237 girls) aged between 6 and 15 (6&#x2009;years 0&#x2009;months old-15&#x2009;years 12&#x2009;months old, M<sub>age</sub>&#x2009;=&#x2009;10.53, SD&#x2009;=&#x2009;2.41) were recruited and divided by age into five groups. The group aged 6&#x2013;7&#x2009;years comprised 89 children (M<sub>age</sub>&#x2009;=&#x2009;7.17, SD&#x2009;=&#x2009;0.51; 49 boys). The group aged 8&#x2013;9&#x2009;years included 143 children (M<sub>age</sub>&#x2009;=&#x2009;9.04, SD&#x2009;=&#x2009;0.53; 71 boys). The group aged 10&#x2013;11&#x2009;years consisted of 118 children (M<sub>age</sub>&#x2009;=&#x2009;10.90, SD&#x2009;=&#x2009;0.56; 67 boys). The group aged 12&#x2013;13&#x2009;years comprised 104 children (M<sub>age</sub>&#x2009;=&#x2009;12.85, SD&#x2009;=&#x2009;0.58; 56 boys). The group aged 14&#x2013;15&#x2009;years included 56 children (M<sub>age</sub>&#x2009;=&#x2009;14.55, SD&#x2009;=&#x2009;0.39; 30 boys). All participants had normal vision and no known psychiatric disorders. Informed consent was obtained from each child&#x2019;s legal guardian, and assent was obtained from the child. The study protocol was approved by the Institutional Review Board (IRB) at the Institute of Psychology, Chinese Academy of Sciences, and conducted in accordance with the principles outlined in the Declaration of Helsinki.</p>
</sec>
<sec id="sec4">
<label>2.2</label>
<title>Materials and instruments</title>
<p>The present study utilized the Pictures of Facial Affect (POFA; <xref ref-type="bibr" rid="ref13">Ekman, 1976</xref>) to assess facial expression recognition. Sixty images depicting six basic emotions (happiness, disgust, anger, fear, sadness, and surprise) were selected from the POFA dataset. To ensure comprehensive coverage, each of the six emotions was paired with every other emotion, resulting in a total of 15 different combinations: happiness-fear, happiness-anger, happiness-disgust, happiness-surprise, happiness-sadness, fear-anger, fear-disgust, fear-surprise, fear-sadness, anger-disgust, anger-surprise, anger-sadness, disgust-surprise, disgust-sadness, and surprise-sadness. The task was programmed using E-prime Version 2.0 (Psychology Software Tools, Incorporated). The stimuli were presented on a desktop computer equipped with a 60-Hz LCD monitor, with a screen resolution of 1,366&#x2009;&#x00D7;&#x2009;768 pixels.</p>
</sec>
<sec id="sec5">
<label>2.3</label>
<title>Procedures</title>
<p>Prior to the experiment, all participants received instructions on how to respond during the two forced-choice paradigm. They were asked to put their left index finger on the &#x201C;f&#x201D; key and their right index finger on the &#x201C;j&#x201D; key, respectively. The experimental procedure was outlined in <xref ref-type="fig" rid="fig1">Figure 1</xref>.</p>
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption>
<p>Schematic representation of the two forced-choice task.</p>
</caption>
<graphic xlink:href="fpsyg-15-1379652-g001.tif"/>
</fig>
<p>The experiment began with 4 practice trials. Each trial started with a 200&#x2009;ms fixation cross, followed by a 300&#x2013;500&#x2009;ms blank screen. Then, a facial image was displayed for 200&#x2009;ms. Participants were instructed to promptly and accurately select the correct word from a pair of words by pressing the &#x201C;f&#x201D; or &#x201C;j&#x201D; key within 2000&#x2009;ms. If participants do not make a selection before the time limit expires, the word pair will vanish automatically. The interval between trials is 1,000&#x2013;1,200&#x2009;ms.</p>
<p>The test trials immediately followed the practice phase, using procedures identical to those of the practice trials. With the six basic facial expressions utilized in the paradigm, there were a total of 15 combinations. Each block presented one of these 15 combinations of facial expressions, and participants were tasked with recognizing emotions through a forced-choice task. Each facial expression category within each block consisted of 10 trials, resulting in a total of 20 trials per block. We established a total of 30 blocks, totaling 600 trials. The order of the 30 blocks was randomized for each participant. The entire experiment lasted approximately 40&#x2013;50&#x2009;min.</p>
</sec>
<sec id="sec6">
<label>2.4</label>
<title>Statistical analysis</title>
<p>The data analysis in this study is structured into two main parts. The first part primarily focuses on examining the developmental patterns and gender differences related to the accuracy and reaction time associated with the six basic expressions across age groups, utilizing traditional analysis of variance (ANOVA). Subsequently, the second part employs advanced data modeling techniques to further delineate the trajectories of expression recognition performance with age, while also investigating gender differences.</p>
<p>In this study, a mixed-design ANOVA was conducted to analyze the effects of expression type, age group, and gender on facial expression recognition. The design consisted of a 6 (expression type: happiness, fear, anger, disgust, surprise, and sadness)&#x2009;&#x00D7;&#x2009;5 (age group: 6&#x2013;7&#x2009;years, 8&#x2013;9&#x2009;years, 10&#x2013;11&#x2009;years, 12&#x2013;13&#x2009;years, and 14&#x2013;15&#x2009;years)&#x2009;&#x00D7;&#x2009;2 (gender: female and male) factorial structure. Expression type served as the intra-group factor, while age and gender served as inter-group factors. The dependent variables were accuracy and reaction time (RT) of emotion recognition for facial expressions. Accuracy data were used as an index of facial expression emotion recognition, while reaction time data were utilized as a measure of the speed of facial expression emotion recognition. This analysis aimed to examine the developmental patterns of facial expression recognition across different age groups and explore the influence of gender on children&#x2019;s expression recognition.</p>
<p>To characterize the increase and decrease of accuracy with respect to age for each expression, we employed General Linear Models (GLMs) across all age groups independently for each emotional expression. For each expression, we independently sampled with replacement for each group and used GLM to fit the accuracy of each expression with respect to age under each group of samples. Then, we calculated the first derivative of each fitted line (equivalent to the beta obtained by fitting a GLM with an intercept), resulting in 1000 derivative values for each emotion. Each derivative represents the rate of change in the accuracy of a specific emotion with age. Additionally, we resampled the boy and girl samples following the previous step to obtain the slope of the regression line under different genders.</p>
</sec>
</sec>
<sec sec-type="results" id="sec7">
<label>3</label>
<title>Results</title>
<sec id="sec8">
<label>3.1</label>
<title>Results for analysis of variance(ANOVA) across different age groups</title>
<p>The accuracy data and reaction time data of 510 children were subjected to 3-way mixed-design ANOVAs (6 emotion type&#x00D7;5 age group&#x00D7;2 gender). The analysis of accuracy revealed significant main effect for age group, <italic>F</italic> (4, 500)&#x2009;=&#x2009;18.566, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, <inline-formula>
<mml:math id="M1">
<mml:msubsup>
<mml:mi>&#x03B7;</mml:mi>
<mml:mi mathvariant="normal">p</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula>=0.129. The accuracy of the five age groups increased, with age group 2 being the inflection point where the accuracy data began to stabilize. A significant main effect of emotion type was found (<italic>F</italic> (5, 2, 500)&#x2009;=&#x2009;343.796, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, <inline-formula>
<mml:math id="M2">
<mml:msubsup>
<mml:mi>&#x03B7;</mml:mi>
<mml:mi mathvariant="normal">p</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula>=0.407), with the accuracy data ranking from highest to lowest for the 6 emotions as happiness, surprise, sadness, disgust, fear, and anger. There was also a significant main effect of gender [<italic>F</italic> (1, 500)&#x2009;=&#x2009;9.477, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, <inline-formula>
<mml:math id="M3">
<mml:msubsup>
<mml:mi>&#x03B7;</mml:mi>
<mml:mi mathvariant="normal">p</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula>=0.019], with girls showing higher accuracy than boys. The interactions of age group &#x00D7; emotion type [<italic>F</italic> (20, 2, 500)&#x2009;=&#x2009;2.804, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, <inline-formula>
<mml:math id="M4">
<mml:msubsup>
<mml:mi>&#x03B7;</mml:mi>
<mml:mi mathvariant="normal">p</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula>=0.022] and age group &#x00D7; emotion type&#x00D7;gender [<italic>F</italic> (20, 2, 500)&#x2009;=&#x2009;2.901, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, <inline-formula>
<mml:math id="M5">
<mml:msubsup>
<mml:mi>&#x03B7;</mml:mi>
<mml:mi mathvariant="normal">p</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula>=0.023] were significant. However, the interaction of emotion type&#x00D7;gender was not statistically significant (<italic>p</italic>&#x2009;&#x003E;&#x2009;0.05). For boys, the interaction of age group &#x00D7; emotion type [<italic>F</italic> (4, 1, 340)&#x2009;=&#x2009;1.818, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.05, <inline-formula>
<mml:math id="M6">
<mml:msubsup>
<mml:mi>&#x03B7;</mml:mi>
<mml:mi mathvariant="normal">p</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula>=0.026] was significant. For girls, the interaction of age group&#x00D7;emotion type [<italic>F</italic> (4, 1, 160)&#x2009;=&#x2009;4.012, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, <inline-formula>
<mml:math id="M7">
<mml:msubsup>
<mml:mi>&#x03B7;</mml:mi>
<mml:mi mathvariant="normal">p</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula>=0.065] was also significant (see <xref ref-type="fig" rid="fig2">Figure 2A</xref>).</p>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption>
<p>Accuracy <bold>(A)</bold> and reaction time <bold>(B)</bold> of facial expression recognition across different age groups.</p>
</caption>
<graphic xlink:href="fpsyg-15-1379652-g002.tif"/>
</fig>
<p>For reaction time, the main effect of emotion type was significant, <italic>F</italic> (5, 2, 500)&#x2009;=&#x2009;241.49, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, <inline-formula>
<mml:math id="M8">
<mml:msubsup>
<mml:mi>&#x03B7;</mml:mi>
<mml:mi mathvariant="normal">p</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula>=0.326. The average reaction time for the 6 types of facial expression recognition ranked from shortest to longest as happiness, surprise, sadness, disgust, anger, and fear. The main effect of age group was also significant [<italic>F</italic> (4, 500)&#x2009;=&#x2009;14.112, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, <inline-formula>
<mml:math id="M9">
<mml:msubsup>
<mml:mi>&#x03B7;</mml:mi>
<mml:mi mathvariant="normal">p</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula>=0.101]. The reaction time of the five age groups decreased (see <xref ref-type="fig" rid="fig2">Figure 2B</xref>). The interaction between emotion type and age group was significant [<italic>F</italic> (20, 2, 500) =3.205, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, <inline-formula>
<mml:math id="M10">
<mml:msubsup>
<mml:mi>&#x03B7;</mml:mi>
<mml:mi mathvariant="normal">p</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula>=0.025]. The main effect of gender, the interaction of emotion type and gender, and the interaction of emotion type, age stage, and gender were not significant.</p>
</sec>
<sec id="sec9">
<label>3.2</label>
<title>General linear model regression analyses with bootstrap procedure</title>
<p>Through general linear model regression analysis, it was found that the accuracy of facial expression recognition generally improves during the transition from childhood to adolescence for all facial expressions (<xref ref-type="fig" rid="fig3">Figure 3A</xref>). The improvement in facial expression recognition accuracy across development varies significantly for each pair of emotions, with each emotional expression exhibiting a unique trajectory across development. The expression of disgust showed the steepest improvement in recognition with age, closely followed by fear. In contrast, expressions of happiness and sadness displayed a more gradual improvement across age.</p>
<fig position="float" id="fig3">
<label>Figure 3</label>
<caption>
<p>General linear model regression analysis of facial expression recognition accuracy across different age groups. <bold>A</bold> represents the overall population, <bold>B</bold> represents boys, and <bold>C</bold> represents girls.</p>
</caption>
<graphic xlink:href="fpsyg-15-1379652-g003.tif"/>
</fig>
<p>Significant differences across genders were observed in the developmental trajectory of different expressions (see <xref ref-type="fig" rid="fig3">Figures 3B</xref>,<xref ref-type="fig" rid="fig3">C</xref>). These differences can be categorized into two groups: boys showed a steeper improvement with age in recognizing expressions of disgust, fear, and anger; girls showed a steeper improvement with age in recognizing expressions of surprise, sadness, and happiness.</p>
</sec>
<sec id="sec10">
<label>3.3</label>
<title>Distance matrix and multidimensional scaling analyses</title>
<p>To further investigate the relationship between emotion recognition and age, we divided the sample into five groups based on age ranges (6&#x2013;7, 8&#x2013;9, 10&#x2013;11, 12&#x2013;13, 14&#x2013;15&#x2009;years). Subsequently, we calculated the average accuracy for all expressions across the age groups, resulting in six values. Next, we computed the distances between each pair of age groups. Each value within this matrix indicates the distance between two age groups for a specific emotion (see <xref ref-type="fig" rid="fig4">Figure 4</xref>).</p>
<fig position="float" id="fig4">
<label>Figure 4</label>
<caption>
<p>Distance matrix (i.e., matrix of pairwise difference values) of all age groups for the 6 basic facial expressions.</p>
</caption>
<graphic xlink:href="fpsyg-15-1379652-g004.tif"/>
</fig>
<p>To discern the age groups that demonstrate the closest similarity, we conducted a multidimensional scaling analysis. This analysis allowed us to visualize the similarity between age groups based on their response patterns. Age groups that were placed close together on the multidimensional scaling plot indicated similar response patterns. The results of the multidimensional scaling analysis demonstrated the age groups across development that are the most similar in overall mean recognition accuracy scores (see <xref ref-type="fig" rid="fig5">Figure 5</xref>). Except for the 6&#x2013;7 age group, the distances of age groups 8&#x2013;15 are clustered together. This clustering indicates similar overall patterns and suggests that there are two main phases in the development of facial emotional expression recognition.</p>
<fig position="float" id="fig5">
<label>Figure 5</label>
<caption>
<p>The multidimensional scaling analysis results for six basic facial expressions.</p>
</caption>
<graphic xlink:href="fpsyg-15-1379652-g005.tif"/>
</fig>
</sec>
</sec>
<sec sec-type="discussion" id="sec11">
<label>4</label>
<title>Discussion</title>
<p>This study, for the first time, explores the developmental trajectory of children&#x2019;s facial expression recognition performance using a two-alternative forced-choice paradigm in a large number of children across a wide age range. There are two main findings. Firstly, the recognition performance of the six basic expressions improves with age, and around the age of 8, the recognition performance of the six basic expressions tends to stabilize. Secondly, girls outperform boys in facial expression recognition. However, the rate of development may differ between genders. These differences can be categorized into two groups: boys showed a steeper improvement with age in recognizing expressions of disgust, fear, and anger; girls showed a steeper improvement with age in recognizing expressions of surprise, sadness, and happiness.</p>
<sec id="sec12">
<label>4.1</label>
<title>The influence of age on the development of facial expression recognition</title>
<p>Our study demonstrated that age plays a significant role in children&#x2019;s facial expression recognition. The accuracy in recognizing emotions such as anger, sadness, surprise, happiness, fear, and disgust stabilizes between the ages of 8 and 15&#x2009;years. We have found that there is a continuous improvement in the proficiency of recognizing facial expressions among children transitioning into adolescence, which is consistent with previous studies (<xref ref-type="bibr" rid="ref3">Bandura and Menlove, 1968</xref>; <xref ref-type="bibr" rid="ref17">Gosselin et al., 1995</xref>; <xref ref-type="bibr" rid="ref51">Vicari et al., 2000</xref>; <xref ref-type="bibr" rid="ref37">Naruse et al., 2013</xref>; <xref ref-type="bibr" rid="ref28">Lawrence et al., 2015</xref>; <xref ref-type="bibr" rid="ref46">Ruffman et al., 2023</xref>). However, what sets our research apart is that we have discovered a stable age inflection point in individuals&#x2019; ability to recognize different facial expressions. Essentially, this inflection point represents a crucial moment in the process of recognizing diverse expressions.</p>
<p>This study provides strong evidence to clarify the key age stages of children&#x2019;s facial expression recognition. From a developmental perspective, our results are inconsistent with the findings of <xref ref-type="bibr" rid="ref43">Rodger et al. (2015)</xref> regarding the two stages of facial expression recognition development from ages 5 to 12 and from ages 13 to adulthood. Our study provides robust evidence to suggest that the recognition of facial expressions in children is closely linked to specific age stages, with a notable shift occurring around the age of 8. The primary factor contributing to the disparity between our findings and those of previous studies lies in the utilization of a significantly large sample size and a highly continuous age range in our investigation. Furthermore, it is plausible that variations in facial expression recognition paradigms may contribute to the observed discrepancies. Another aspect to consider is that the facial expression recognition paradigm we utilized involved relatively short presentation for the facial expressions, emphasizing the attributes of rapid facial expression recognition.</p>
<p>According to sociological theories, children&#x2019;s social environment undergoes significant changes in their junior year of primary school. During this period, children begin to establish school bonding with classmates and teachers through socialization (<xref ref-type="bibr" rid="ref10">Catalano et al., 2004</xref>). Faces are recognized as a primary tool for social communication with peers (see <xref ref-type="bibr" rid="ref23">Jack and Schyns, 2015</xref> for a review). Hence, during the process of bonding and communication, children could be trained to better read the facial expressions of people in the social environment, leading to rapid development in expression recognition ability (<xref ref-type="bibr" rid="ref37">Naruse et al., 2013</xref>). It is therefore reasonable to suggest that the inflection points of children&#x2019;s emotion recognition could be detected around the age of 8&#x2009;years old.</p>
<p>In addition to social reasons, continued neurological development also explains the pattern. For example, although there may not be a one-to-one relationship between a certain brain region and specific emotion recognition, the ongoing development of the medial prefrontal cortex (MPFC) as a general region for emotion processing and the anterior cingulate cortex (ACC) as the attention regulator for emotional stimuli (see <xref ref-type="bibr" rid="ref41">Phan et al., 2002</xref> for a review) throughout childhood and adolescence could explain the improvement over time.</p>
<p>Additionally, the reaction time for children&#x2019;s recognition decreases from 10 to 15&#x2009;years of age. It should be noted that the inflection point in the performance of facial expression recognition does not completely coincide with the inflection point in reaction time. This finding provides further evidence to support the notion that the development of facial expression recognition and cognitive development follow distinct trajectories. Essentially, the advancement of facial expression recognition does not occur solely as a consequence of alterations in cognitive development. It also suggests that the emergence of the inflection point in children&#x2019;s facial expression recognition may be influenced by the process of socialization. Due to the utilization of emotion labeling or recognition tasks in the current study, as opposed to Rodger&#x2019;s study which focused on assessing perceptual thresholds, it is plausible that the observed age-related improvements in accuracy were associated with the efficiency of recognition and/or labeling processes, rather than any perceptual developments.</p>
</sec>
<sec id="sec13">
<label>4.2</label>
<title>The gender difference on the development of facial expression recognition</title>
<p>Our findings showed that the accuracy of children&#x2019;s facial expression recognition is affected by gender. In general, females are found to exhibit more acute abilities at decoding and processing discrete facial expressions (<xref ref-type="bibr" rid="ref27">Larkin et al., 2002</xref>; <xref ref-type="bibr" rid="ref20">Hall and Matsumoto, 2004</xref>; <xref ref-type="bibr" rid="ref40">Passarelli et al., 2018</xref>). Additionally, girls also often obtain higher accuracy than boys (<xref ref-type="bibr" rid="ref34">McClure, 2000</xref>). Previous studies have shown that gender poses a significant effect on the accuracy of recognizing expressions of surprise, and on average, girls exhibit more accurate recognition for expressions of anger and disgust than boys (<xref ref-type="bibr" rid="ref36">Montirosso et al., 2010</xref>). Other studies also affirmed this gender impact on the accuracy of recognizing expressions of disgust in children aged 8&#x2013;11&#x2009;years (<xref ref-type="bibr" rid="ref32">Mancini et al., 2013</xref>). <xref ref-type="bibr" rid="ref28">Lawrence et al.&#x2019;s (2015)</xref> research demonstrated that there is a female advantage in facial expression recognition, with girls exhibiting higher accuracy than boys at all ages between 6 and 16 years old.</p>
<p>Gender differences in the inflection points of recognition accuracy for expressions of surprise, happiness, and anger demonstrated a gender advantage by participants. The neural regions involved in the facial expression processing of males and females utilize different activation modes. Males and females exhibit unique activation modes in their neural regions involved in emotional facial expression processing such as the amygdala and prefrontal cortex (<xref ref-type="bibr" rid="ref32">Mancini et al., 2013</xref>; <xref ref-type="bibr" rid="ref28">Lawrence et al., 2015</xref>; <xref ref-type="bibr" rid="ref1">Arriaga and Aguiar, 2019</xref>). As such, the two genders may rely on different mental processes to recognize facial expressions. However, it is worth mentioning that this gender difference decreases with age. The female&#x2019;s overall advantage in facial expression recognition found by the present study is consistent with reports based on unbiased hit rates by <xref ref-type="bibr" rid="ref47">Sasson et al. (2010)</xref>, who employed static stimuli with two intensity levels. These results reveal that females are better at reading emotional facial expressions than males regardless of the degree of visual cues displayed on the face. The female&#x2019;s judgment of expressions, independent of the degree of facial muscle activation, suggests that the mechanisms for reading emotions are generally better in females than in males. The females&#x2019; gender advantage in facial expression recognition (accuracy and speed) appears to be more robust when using stimuli of higher ecological validity, or stimuli incorporating a wide range of emotional variations displayed dynamically. The current study highlights the importance of employing stimuli of higher ecological validity in future research.</p>
<p>From an evolutionary perspective, the ability of females to more quickly and accurately recognize emotions could be associated with females&#x2019; roles as caretakers for children and families. As proposed by the primary caretaker hypothesis (<xref ref-type="bibr" rid="ref2">Babchuk et al., 1985</xref>), fast and automatic processing of facial expression might involve innate, evolutionary mechanisms to effectively tend to offspring. Being able to instantly identify others&#x2019; emotions could allow females to make appropriate responses to the needs of others. For example, after recognizing sadness in another person, females could reciprocate with comforting behaviors, which is important for maintaining their social bonding and nurturing roles.</p>
<p>Conversely, females&#x2019; advantage could be acquired as part of the unique emotional experiences and expectations of their gender in socialization. According to the biosocial model (<xref ref-type="bibr" rid="ref35">Money and Ehrhardt, 1972</xref>), once a human is born, their biological sex determines their social labeling. This labeling leads to differentiated treatments for boys and girls. Generally, females are encouraged to display and recognize emotions while males are asked to suppress them (<xref ref-type="bibr" rid="ref7">Buck, 1977</xref>). Thus, females are more likely to possess an advantage in exposure to facial expressions compared to males. Derived from the reports by <xref ref-type="bibr" rid="ref8">Calvo et al. (2014)</xref>, this exposure advantage may lead to familiarization that facilitates the identification of facial emotional expressions. <xref ref-type="bibr" rid="ref8">Calvo et al. (2014)</xref> proved that individuals are better at recognizing the emotions that they encounter the most frequently in social interactions simply due to familiarity. It is therefore possible that females predominantly develop better emotion-processing abilities due to more exposure to emotional displays. Social influence coupled with biological factors can result in females being more well-versed in facial expression recognition. Socialization practices and display rules may make it easier for girls to display emotion expressions. It would thus be fascinating to further investigate how gender and socialization relate to emotion recognition, such as whether gender-typical female identification and socialization are associated with better recognition and vice versa.</p>
</sec>
<sec id="sec14">
<label>4.3</label>
<title>The influence of emotion type on facial expression recognition</title>
<p>The current study reveals that the developmental patterns for different emotions are not uniform. We found that children&#x2019;s recognition of happiness has the highest accuracy, with significant differences from the other five basic expressions. This positivity bias in children has been confirmed by numerous studies (<xref ref-type="bibr" rid="ref25">Kestenbaum and Nelson, 1992</xref>; <xref ref-type="bibr" rid="ref6">Boyatzis et al., 1993</xref>; <xref ref-type="bibr" rid="ref29">Lenti et al., 1999</xref>; <xref ref-type="bibr" rid="ref16">Garcia and Tully, 2020</xref>), and it may rest in the natural facial structure it induces. Children, like adults, interpret facial expressions primarily based on basic facial features. As such, facial expressions with high similarities are easily confused, resulting in low accuracy. Compared with changes in the facial structure of negative emotions (sadness, anger, fear, disgust), the facial feature for happiness is clear and distinct, hence the happy expression could be easily recognized (<xref ref-type="bibr" rid="ref32">Mancini et al., 2013</xref>; <xref ref-type="bibr" rid="ref49">Sou and Xu, 2019</xref>).</p>
<p>On the other hand, the recognition of negative emotions, especially anger, is rather disadvantaged. Our results show that the accuracy for recognizing angry expressions is the lowest. Both anger and disgust communicate the social information of condemnation (<xref ref-type="bibr" rid="ref45">Rot et al., 2022</xref>), which potentially explains why they are easily confused, especially why it is difficult for children to distinguish one from the other (<xref ref-type="bibr" rid="ref54">Widen and Russell, 2010a</xref>,<xref ref-type="bibr" rid="ref55">b</xref>; <xref ref-type="bibr" rid="ref37">Naruse et al., 2013</xref>). An alternative explanation to the disadvantaged recognition of anger is that anger is similar to other emotions to some extent, for example, sadness (<xref ref-type="bibr" rid="ref14">Ekman and Friesen, 1978</xref>). However, findings in other studies were inconsistent with this. One study proposed that anger, like happiness, is one of the most easily recognized emotions (<xref ref-type="bibr" rid="ref32">Mancini et al., 2013</xref>), while <xref ref-type="bibr" rid="ref16">Garcia and Tully (2020)</xref> stated that anger was identified more accurately than sadness, but less accurately than happiness by children aged 7&#x2013;10&#x2009;years old.</p>
<p>In the case of fear recognition, we found that the accuracy of recognizing fear was the second lowest. The difficulty in identifying fear is supported by previous studies (<xref ref-type="bibr" rid="ref33">Matsumoto and Hwang, 2011</xref>; <xref ref-type="bibr" rid="ref32">Mancini et al., 2013</xref>). However, this result is hardly consistent with the hypothesis of psychological evolution, which states that recognizing expressions is adaptive and allows individuals to avoid dangers in the environment. The ability to recognize fear is particularly beneficial to avoid potential threats so that individuals can better plan for their next move (such as fight or flight).</p>
</sec>
<sec id="sec15">
<label>4.4</label>
<title>Limitations and direction for future studies</title>
<p>Although the main hypotheses are supported and the findings are mostly in line with previous studies, the current study still possesses several limitations that could be addressed in future research.</p>
<p>The first concerns ecological validity, as mentioned before. Only static images were employed in the current study. To improve the representation of real life, it is therefore suggested to use animated images or recorded video clips of models at different intensities in future studies. This would give participants a more vivid experience and allow for a more comprehensive evaluation of gender differences in facial expression recognition ability across different age groups.</p>
<p>Regarding the second limitation, participants completed a forced-choice task in this study, which is highly dependent on their verbal and visual abilities. However, for children, these two abilities are not fully developed, imposing unfair disadvantages and potentially confounding the outcomes of our study. In future studies, we recommend a combination of measurements suitable for different age groups, such as discrimination paradigms and free labeling tasks, for a more accurate assessment of facial expression recognition ability.</p>
</sec>
</sec>
<sec sec-type="conclusions" id="sec16">
<label>5</label>
<title>Conclusion</title>
<p>In summary, our study findings indicate several key points regarding facial expression recognition in children. Firstly, facial expression recognition accuracy improves during childhood and stabilizes between the ages of 8 and 15, showing synchronous developmental patterns across various expressions. Secondly, children exhibit a decreasing trend in reaction time for recognizing facial expressions from ages 10 to 15. Thirdly, gender influences the accuracy of facial expression recognition in children, with girls demonstrating higher accuracy compared to boys.</p>
</sec>
<sec sec-type="data-availability" id="sec17">
<title>Data availability statement</title>
<p>The original contributions presented in the study are included in the article/supplementary material, further inquiries can be directed to the corresponding author.</p>
</sec>
<sec sec-type="ethics-statement" id="sec18">
<title>Ethics statement</title>
<p>The studies involving humans were approved by Institute of Psychology, Chinese Academy of Sciences. The studies were conducted in accordance with the local legislation and institutional requirements. Written informed consent for participation in this study was provided by the participants&#x2019; legal guardians/next of kin. Written informed consent was obtained from the individual(s) for the publication of any potentially identifiable images or data included in this article.</p>
</sec>
<sec sec-type="author-contributions" id="sec19">
<title>Author contributions</title>
<p>YW: Data curation, Formal analysis, Investigation, Methodology, Writing &#x2013; original draft. QL: Data curation, Formal analysis, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing. YZ: Investigation, Validation, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing. KZ: Conceptualization, Formal analysis, Funding acquisition, Investigation, Methodology, Project administration, Resources, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing.</p>
</sec>
</body>
<back>
<sec sec-type="funding-information" id="sec20">
<title>Funding</title>
<p>The author(s) declare that financial support was received for the research, authorship, and/or publication of this article. This research was financially supported in part by the National Natural Science Foundation of China (32071055).</p>
</sec>
<sec sec-type="COI-statement" id="sec21">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="sec100" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="ref1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Arriaga</surname> <given-names>P.</given-names></name> <name><surname>Aguiar</surname> <given-names>C.</given-names></name></person-group> (<year>2019</year>). <article-title>Gender differences in aggression: the role of displaying facial emotional cues in a competitive situation</article-title>. <source>Scand. J. Psychol.</source> <volume>60</volume>, <fpage>421</fpage>&#x2013;<lpage>429</lpage>. doi: <pub-id pub-id-type="doi">10.1111/sjop.12568</pub-id>, PMID: <pub-id pub-id-type="pmid">31378010</pub-id></citation></ref>
<ref id="ref2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Babchuk</surname> <given-names>W. A.</given-names></name> <name><surname>Hames</surname> <given-names>R. B.</given-names></name> <name><surname>Thompson</surname> <given-names>R. A.</given-names></name></person-group> (<year>1985</year>). <article-title>Sex differences in the recognition of infant facial expressions of emotion: the primary caretaker hypothesis</article-title>. <source>Ethol. Sociobiol.</source> <volume>6</volume>, <fpage>89</fpage>&#x2013;<lpage>101</lpage>. doi: <pub-id pub-id-type="doi">10.1016/0162-3095(85)90002-0</pub-id></citation></ref>
<ref id="ref3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bandura</surname> <given-names>A.</given-names></name> <name><surname>Menlove</surname> <given-names>F. L.</given-names></name></person-group> (<year>1968</year>). <article-title>Factors determining vicarious extinction of avoidance behavior through symbolic modeling</article-title>. <source>J. Pers. Soc. Psychol.</source> <volume>8</volume>, <fpage>99</fpage>&#x2013;<lpage>108</lpage>. doi: <pub-id pub-id-type="doi">10.1037/h0025260</pub-id>, PMID: <pub-id pub-id-type="pmid">5644484</pub-id></citation></ref>
<ref id="ref4"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Bayet</surname> <given-names>L.</given-names></name> <name><surname>Nelson</surname> <given-names>C. A.</given-names></name></person-group> (<year>2019</year>). <article-title>The perception of facial emotion in typical and atypical development</article-title>. In LoBue V., P&#x00E9;rez-Edgar K., Buss K. A. (Eds.) <source>Handbook Emot. Dev.</source> <publisher-name>Springer</publisher-name>, <fpage>105</fpage>&#x2013;<lpage>138</lpage>. doi: <pub-id pub-id-type="doi">10.1007/978-3-030-17332-6_6</pub-id></citation></ref>
<ref id="ref5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bourne</surname> <given-names>V. J.</given-names></name></person-group> (<year>2005</year>). <article-title>Lateralised processing of positive facial emotion: sex differences in strength of hemispheric dominance</article-title>. <source>Neuropsychologia</source> <volume>43</volume>, <fpage>953</fpage>&#x2013;<lpage>956</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuropsychologia.2004.08.007</pub-id>, PMID: <pub-id pub-id-type="pmid">15716165</pub-id></citation></ref>
<ref id="ref6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Boyatzis</surname> <given-names>C. J.</given-names></name> <name><surname>Chazan</surname> <given-names>E.</given-names></name> <name><surname>Ting</surname> <given-names>C. Z.</given-names></name></person-group> (<year>1993</year>). <article-title>Preschool children's decoding of facial emotions</article-title>. <source>J. Genet. Psychol.</source> <volume>154</volume>, <fpage>375</fpage>&#x2013;<lpage>382</lpage>. doi: <pub-id pub-id-type="doi">10.1080/00221325.1993.10532190</pub-id>, PMID: <pub-id pub-id-type="pmid">8245911</pub-id></citation></ref>
<ref id="ref7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Buck</surname> <given-names>R.</given-names></name></person-group> (<year>1977</year>). <article-title>Nonverbal communication of affect in preschool in children: relationships with personality and skin conductance</article-title>. <source>J. Pers. Soc. Psychol.</source> <volume>35</volume>, <fpage>225</fpage>&#x2013;<lpage>236</lpage>. doi: <pub-id pub-id-type="doi">10.1037/0022-3514.35.4.225</pub-id>, PMID: <pub-id pub-id-type="pmid">864589</pub-id></citation></ref>
<ref id="ref8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Calvo</surname> <given-names>M. G.</given-names></name> <name><surname>Guti&#x00E9;rrez-Garc&#x00ED;a</surname> <given-names>A.</given-names></name> <name><surname>Fern&#x00E1;ndez-Mart&#x00ED;n</surname> <given-names>A.</given-names></name> <name><surname>Nummenmaa</surname> <given-names>L.</given-names></name></person-group> (<year>2014</year>). <article-title>Recognition of facial expressions of emotion is related to their frequency in everyday life</article-title>. <source>J. Nonverbal Behav.</source> <volume>38</volume>, <fpage>549</fpage>&#x2013;<lpage>567</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s10919-014-0191-3</pub-id></citation></ref>
<ref id="ref9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cameron</surname> <given-names>D.</given-names></name> <name><surname>Millings</surname> <given-names>A.</given-names></name> <name><surname>Fernando</surname> <given-names>S.</given-names></name> <name><surname>Collins</surname> <given-names>E. C.</given-names></name> <name><surname>Moore</surname> <given-names>R.</given-names></name> <name><surname>Sharkey</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2018</year>). <article-title>The effects of robot facial emotional expressions and gender on child&#x2013;robot interaction in a field study</article-title>. <source>Connect. Sci.</source> <volume>30</volume>, <fpage>343</fpage>&#x2013;<lpage>361</lpage>. doi: <pub-id pub-id-type="doi">10.1080/09540091.2018.1454889</pub-id></citation></ref>
<ref id="ref10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Catalano</surname> <given-names>R. F.</given-names></name> <name><surname>Haggerty</surname> <given-names>K. P.</given-names></name> <name><surname>Oesterle</surname> <given-names>S.</given-names></name> <name><surname>Fleming</surname> <given-names>C. B.</given-names></name> <name><surname>Hawkins</surname> <given-names>J. D.</given-names></name></person-group> (<year>2004</year>). <article-title>The importance of bonding to school for healthy development: findings from the social development research group</article-title>. <source>J. Sch. Health</source> <volume>74</volume>, <fpage>252</fpage>&#x2013;<lpage>261</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1746-1561.2004.tb08281.x</pub-id>, PMID: <pub-id pub-id-type="pmid">15493702</pub-id></citation></ref>
<ref id="ref11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cunningham</surname> <given-names>J. G.</given-names></name> <name><surname>Odom</surname> <given-names>R. D.</given-names></name></person-group> (<year>1986</year>). <article-title>Differential salience of facial features in children's perception of affective expression</article-title>. <source>Child Dev.</source> <volume>57</volume>, <fpage>136</fpage>&#x2013;<lpage>142</lpage>. doi: <pub-id pub-id-type="doi">10.2307/1130645</pub-id></citation></ref>
<ref id="ref12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Durand</surname> <given-names>K.</given-names></name> <name><surname>Gallay</surname> <given-names>M.</given-names></name> <name><surname>Seigneuric</surname> <given-names>A.</given-names></name> <name><surname>Robichon</surname> <given-names>F.</given-names></name> <name><surname>Baudouin</surname> <given-names>J. Y.</given-names></name></person-group> (<year>2007</year>). <article-title>The development of facial expression recognition: the role of configural information</article-title>. <source>J. Exp. Child Psychol.</source> <volume>97</volume>, <fpage>14</fpage>&#x2013;<lpage>27</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jecp.2006.12.001</pub-id>, PMID: <pub-id pub-id-type="pmid">17291524</pub-id></citation></ref>
<ref id="ref13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ekman</surname> <given-names>P.</given-names></name></person-group> (<year>1976</year>). <article-title>Measuring facial movement</article-title>. <source>Environ. Psychol. Nonverbal Behav.</source> <volume>1</volume>, <fpage>56</fpage>&#x2013;<lpage>75</lpage>. doi: <pub-id pub-id-type="doi">10.1007/BF01115465</pub-id></citation></ref>
<ref id="ref14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ekman</surname> <given-names>P.</given-names></name> <name><surname>Friesen</surname> <given-names>W. V.</given-names></name></person-group> (<year>1978</year>). <article-title>Facial Action Coding System: A technique for the measurement of facial action</article-title>. <source>Environ. Psychol. Nonverbal Behav</source>. doi: <pub-id pub-id-type="doi">10.1037/t27734-000</pub-id></citation></ref>
<ref id="ref15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gao</surname> <given-names>X.</given-names></name> <name><surname>Maurer</surname> <given-names>D.</given-names></name></person-group> (<year>2009</year>). <article-title>Influence of intensity on children&#x2019;s sensitivity to happy, sad, and fearful facial expressions</article-title>. <source>J. Exp. Child Psychol.</source> <volume>102</volume>, <fpage>503</fpage>&#x2013;<lpage>521</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jecp.2008.11.002</pub-id>, PMID: <pub-id pub-id-type="pmid">19124135</pub-id></citation></ref>
<ref id="ref16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Garcia</surname> <given-names>S. E.</given-names></name> <name><surname>Tully</surname> <given-names>E. C.</given-names></name></person-group> (<year>2020</year>). <article-title>Children's recognition of happy, sad, and angry facial expressions across emotive intensities</article-title>. <source>J. Exp. Child Psychol.</source> <volume>197</volume>:<fpage>104881</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jecp.2020.104881</pub-id>, PMID: <pub-id pub-id-type="pmid">32559635</pub-id></citation></ref>
<ref id="ref17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gosselin</surname> <given-names>P.</given-names></name> <name><surname>Roberge</surname> <given-names>P.</given-names></name> <name><surname>Lavall&#x00E9;e</surname> <given-names>M.</given-names></name></person-group> (<year>1995</year>). <article-title>The development of recognition of human facial expressions of emotion</article-title>. <source>Enfance</source> <volume>48</volume>, <fpage>379</fpage>&#x2013;<lpage>396</lpage>. doi: <pub-id pub-id-type="doi">10.3406/enfan.1995.2144</pub-id></citation></ref>
<ref id="ref18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gross</surname> <given-names>A. L.</given-names></name> <name><surname>Ballif</surname> <given-names>B.</given-names></name></person-group> (<year>1991</year>). <article-title>Children's understanding of emotion from facial expressions and situations: a review</article-title>. <source>Dev. Rev.</source> <volume>11</volume>, <fpage>368</fpage>&#x2013;<lpage>398</lpage>. doi: <pub-id pub-id-type="doi">10.1016/0273-2297(91)90019-K</pub-id></citation></ref>
<ref id="ref19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hall</surname> <given-names>J. A.</given-names></name></person-group> (<year>1978</year>). <article-title>Gender effects in decoding nonverbal cues</article-title>. <source>Psychol. Bull.</source> <volume>85</volume>, <fpage>845</fpage>&#x2013;<lpage>857</lpage>. doi: <pub-id pub-id-type="doi">10.1037/0033-2909.85.4.845</pub-id></citation></ref>
<ref id="ref20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hall</surname> <given-names>J. A.</given-names></name> <name><surname>Matsumoto</surname> <given-names>D.</given-names></name></person-group> (<year>2004</year>). <article-title>Gender differences in judgments of multiple emotions from facial expressions</article-title>. <source>Emotion</source> <volume>4</volume>, <fpage>201</fpage>&#x2013;<lpage>206</lpage>. doi: <pub-id pub-id-type="doi">10.1037/1528-3542.4.2.201</pub-id>, PMID: <pub-id pub-id-type="pmid">15222856</pub-id></citation></ref>
<ref id="ref21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Herba</surname> <given-names>C. M.</given-names></name> <name><surname>Landau</surname> <given-names>S.</given-names></name> <name><surname>Russell</surname> <given-names>T.</given-names></name> <name><surname>Ecker</surname> <given-names>C.</given-names></name> <name><surname>Phillips</surname> <given-names>M. L.</given-names></name></person-group> (<year>2006</year>). <article-title>The development of emotion-processing in children: effects of age, emotion, and intensity</article-title>. <source>J. Child Psychol. Psychiatry</source> <volume>47</volume>, <fpage>1098</fpage>&#x2013;<lpage>1106</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1469-7610.2006.01652.x</pub-id>, PMID: <pub-id pub-id-type="pmid">17076748</pub-id></citation></ref>
<ref id="ref22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Herba</surname> <given-names>C.</given-names></name> <name><surname>Phillips</surname> <given-names>M.</given-names></name></person-group> (<year>2004</year>). <article-title>Annotation: development of facial expression recognition from childhood to adolescence: behavioural and neurological perspectives</article-title>. <source>J. Child Psychol. Psychiatry</source> <volume>45</volume>, <fpage>1185</fpage>&#x2013;<lpage>1198</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1469-7610.2004.00316.x</pub-id>, PMID: <pub-id pub-id-type="pmid">15335339</pub-id></citation></ref>
<ref id="ref23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jack</surname> <given-names>R. E.</given-names></name> <name><surname>Schyns</surname> <given-names>P. G.</given-names></name></person-group> (<year>2015</year>). <article-title>The human face as a dynamic tool for social communication</article-title>. <source>Curr. Biol.</source> <volume>25</volume>, <fpage>R621</fpage>&#x2013;<lpage>R634</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cub.2015.05.052</pub-id></citation></ref>
<ref id="ref24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Johnston</surname> <given-names>P. J.</given-names></name> <name><surname>Kaufman</surname> <given-names>J.</given-names></name> <name><surname>Bajic</surname> <given-names>J.</given-names></name> <name><surname>Sercombe</surname> <given-names>A.</given-names></name> <name><surname>Michie</surname> <given-names>P. T.</given-names></name> <name><surname>Karayanidis</surname> <given-names>F.</given-names></name></person-group> (<year>2011</year>). <article-title>Facial emotion and identity processing development in 5- to 15-year-old children</article-title>. <source>Front. Psychol.</source> <volume>2</volume>:<fpage>26</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2011.00026</pub-id></citation></ref>
<ref id="ref25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kestenbaum</surname> <given-names>R.</given-names></name> <name><surname>Nelson</surname> <given-names>C. A.</given-names></name></person-group> (<year>1992</year>). <article-title>Neural and behavioral correlates of emotion recognition in children and adults</article-title>. <source>J. Exp. Child Psychol.</source> <volume>54</volume>, <fpage>1</fpage>&#x2013;<lpage>18</lpage>. doi: <pub-id pub-id-type="doi">10.1016/0022-0965(92)90014-W</pub-id></citation></ref>
<ref id="ref26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kiecolt-Glaser</surname> <given-names>J. K.</given-names></name> <name><surname>Newton</surname> <given-names>T. L.</given-names></name></person-group> (<year>2001</year>). <article-title>Marriage and health: his and hers</article-title>. <source>Psychol. Bull.</source> <volume>127</volume>, <fpage>472</fpage>&#x2013;<lpage>503</lpage>. doi: <pub-id pub-id-type="doi">10.1037/0033-2909.127.4.472</pub-id>, PMID: <pub-id pub-id-type="pmid">11439708</pub-id></citation></ref>
<ref id="ref27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Larkin</surname> <given-names>K. T.</given-names></name> <name><surname>Martin</surname> <given-names>R. R.</given-names></name> <name><surname>McClain</surname> <given-names>S. E.</given-names></name></person-group> (<year>2002</year>). <article-title>Cynical hostility and the accuracy of decoding facial expressions of emotions</article-title>. <source>J. Behav. Med.</source> <volume>25</volume>, <fpage>285</fpage>&#x2013;<lpage>292</lpage>. doi: <pub-id pub-id-type="doi">10.1023/A:1015384812283</pub-id></citation></ref>
<ref id="ref28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lawrence</surname> <given-names>K.</given-names></name> <name><surname>Campbell</surname> <given-names>R.</given-names></name> <name><surname>Skuse</surname> <given-names>D.</given-names></name></person-group> (<year>2015</year>). <article-title>Age, gender and puberty influence the development of facial expression recognition</article-title>. <source>Front. Psychol.</source> <volume>6</volume>:<fpage>761</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2015.00761</pub-id></citation></ref>
<ref id="ref29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lenti</surname> <given-names>C.</given-names></name> <name><surname>Lenti-Boero</surname> <given-names>D.</given-names></name> <name><surname>Giacobbe</surname> <given-names>A.</given-names></name></person-group> (<year>1999</year>). <article-title>Decoding of emotional expressions in children and adolescents</article-title>. <source>Percept. Mot. Skills</source> <volume>89</volume>, <fpage>808</fpage>&#x2013;<lpage>814</lpage>. doi: <pub-id pub-id-type="doi">10.2466/pms.1999.89.3.808</pub-id></citation></ref>
<ref id="ref30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>M.</given-names></name> <name><surname>Liu</surname> <given-names>C. H.</given-names></name> <name><surname>Zheng</surname> <given-names>S.</given-names></name> <name><surname>Zhao</surname> <given-names>K.</given-names></name> <name><surname>Fu</surname> <given-names>X.</given-names></name></person-group> (<year>2021</year>). <article-title>Reexamining the neural network involved in perception of facial expression: a meta-analysis</article-title>. <source>Neurosci. Biobehav. Rev.</source> <volume>131</volume>, <fpage>179</fpage>&#x2013;<lpage>191</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neubiorev.2021.09.024</pub-id>, PMID: <pub-id pub-id-type="pmid">34536463</pub-id></citation></ref>
<ref id="ref31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lawrie</surname> <given-names>L.</given-names></name> <name><surname>Jackson</surname> <given-names>M. C.</given-names></name> <name><surname>Phillips</surname> <given-names>L. H.</given-names></name></person-group> (<year>2019</year>). <article-title>Effects of induced sad mood on facial emotion perception in young and older adults</article-title>. <source>Neuropsychol. Dev. Cogn. B Aging Neuropsychol. Cogn.</source> <volume>26</volume>, <fpage>319</fpage>&#x2013;<lpage>335</lpage>. doi: <pub-id pub-id-type="doi">10.1080/13825585.2018.1438584</pub-id></citation></ref>
<ref id="ref32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mancini</surname> <given-names>G.</given-names></name> <name><surname>Agnoli</surname> <given-names>S.</given-names></name> <name><surname>Baldaro</surname> <given-names>B.</given-names></name> <name><surname>Bitti</surname> <given-names>P. E. R.</given-names></name> <name><surname>Surcinelli</surname> <given-names>P.</given-names></name></person-group> (<year>2013</year>). <article-title>Facial expressions of emotions: recognition accuracy and affective reactions during late childhood</article-title>. <source>J. Psychol.</source> <volume>147</volume>, <fpage>599</fpage>&#x2013;<lpage>617</lpage>. doi: <pub-id pub-id-type="doi">10.1080/00223980.2012.727891</pub-id>, PMID: <pub-id pub-id-type="pmid">24199514</pub-id></citation></ref>
<ref id="ref33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Matsumoto</surname> <given-names>D.</given-names></name> <name><surname>Hwang</surname> <given-names>H. S.</given-names></name></person-group> (<year>2011</year>). <article-title>Judgments of facial expressions of emotion in profile</article-title>. <source>Emotion</source> <volume>11</volume>, <fpage>1223</fpage>&#x2013;<lpage>1229</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0024356</pub-id>, PMID: <pub-id pub-id-type="pmid">21942701</pub-id></citation></ref>
<ref id="ref34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>McClure</surname> <given-names>E. B.</given-names></name></person-group> (<year>2000</year>). <article-title>A meta-analytic review of sex differences in facial expression processing and their development in infants, children, and adolescents</article-title>. <source>Psychol. Bull.</source> <volume>126</volume>, <fpage>424</fpage>&#x2013;<lpage>453</lpage>. doi: <pub-id pub-id-type="doi">10.1037/0033-2909.126.3.424</pub-id>, PMID: <pub-id pub-id-type="pmid">10825784</pub-id></citation></ref>
<ref id="ref35"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Money</surname> <given-names>J.</given-names></name> <name><surname>Ehrhardt</surname> <given-names>A. A.</given-names></name></person-group> (<year>1972</year>). <source>Man and woman, boy and girl: Differentiation and dimorphism of gender identity from conception to maturity</source>. <publisher-loc>Baltimore, MD</publisher-loc>: <publisher-name>Johns Hopkins University Press</publisher-name>.</citation></ref>
<ref id="ref36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Montirosso</surname> <given-names>R.</given-names></name> <name><surname>Peverelli</surname> <given-names>M.</given-names></name> <name><surname>Frigerio</surname> <given-names>E.</given-names></name> <name><surname>Crespi</surname> <given-names>M.</given-names></name> <name><surname>Borgatti</surname> <given-names>R.</given-names></name></person-group> (<year>2010</year>). <article-title>The development of dynamic facial expression recognition at different intensities in 4- to 18-year-olds</article-title>. <source>Soc. Dev.</source> <volume>19</volume>, <fpage>71</fpage>&#x2013;<lpage>92</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1467-9507.2008.00527.x</pub-id></citation></ref>
<ref id="ref37"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Naruse</surname> <given-names>S.</given-names></name> <name><surname>Hashimoto</surname> <given-names>T.</given-names></name> <name><surname>Mori</surname> <given-names>K.</given-names></name> <name><surname>Tsuda</surname> <given-names>Y.</given-names></name> <name><surname>Takahara</surname> <given-names>M.</given-names></name> <name><surname>Kagami</surname> <given-names>S.</given-names></name></person-group> (<year>2013</year>). <article-title>Developmental changes in facial expression recognition in Japanese school-age children</article-title>. <source>J. Med. Investig.</source> <volume>60</volume>, <fpage>114</fpage>&#x2013;<lpage>120</lpage>. doi: <pub-id pub-id-type="doi">10.2152/jmi.60.114</pub-id>, PMID: <pub-id pub-id-type="pmid">23614919</pub-id></citation></ref>
<ref id="ref38"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Neff</surname> <given-names>L. A.</given-names></name> <name><surname>Karney</surname> <given-names>B. R.</given-names></name></person-group> (<year>2005</year>). <article-title>Gender differences in social support: a question of skill or responsiveness?</article-title> <source>J. Pers. Soc. Psychol.</source> <volume>88</volume>, <fpage>79</fpage>&#x2013;<lpage>90</lpage>. doi: <pub-id pub-id-type="doi">10.1037/0022-3514.88.1.79</pub-id></citation></ref>
<ref id="ref39"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ogren</surname> <given-names>M.</given-names></name> <name><surname>Johnson</surname> <given-names>S. P.</given-names></name></person-group> (<year>2021</year>). <article-title>Factors facilitating early emotion understanding development: contributions to individual differences</article-title>. <source>Hum. Dev.</source> <volume>64</volume>, <fpage>108</fpage>&#x2013;<lpage>118</lpage>. doi: <pub-id pub-id-type="doi">10.1159/000511628</pub-id>, PMID: <pub-id pub-id-type="pmid">34305161</pub-id></citation></ref>
<ref id="ref40"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Passarelli</surname> <given-names>M.</given-names></name> <name><surname>Masini</surname> <given-names>M.</given-names></name> <name><surname>Bracco</surname> <given-names>F.</given-names></name> <name><surname>Petrosino</surname> <given-names>M.</given-names></name> <name><surname>Chiorri</surname> <given-names>C.</given-names></name></person-group> (<year>2018</year>). <article-title>Development and validation of the facial expression recognition test (FERT)</article-title>. <source>Psychol. Assess.</source> <volume>30</volume>, <fpage>1479</fpage>&#x2013;<lpage>1490</lpage>. doi: <pub-id pub-id-type="doi">10.1037/pas0000595</pub-id>, PMID: <pub-id pub-id-type="pmid">30024180</pub-id></citation></ref>
<ref id="ref41"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Phan</surname> <given-names>K. L.</given-names></name> <name><surname>Wager</surname> <given-names>T.</given-names></name> <name><surname>Taylor</surname> <given-names>S. F.</given-names></name> <name><surname>Liberzon</surname> <given-names>I.</given-names></name></person-group> (<year>2002</year>). <article-title>Functional neuroanatomy of emotion: a meta-analysis of emotion activation studies in PET and fMRI</article-title>. <source>NeuroImage</source> <volume>16</volume>, <fpage>331</fpage>&#x2013;<lpage>348</lpage>. doi: <pub-id pub-id-type="doi">10.1006/nimg.2002.1087</pub-id>, PMID: <pub-id pub-id-type="pmid">12030820</pub-id></citation></ref>
<ref id="ref42"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rochat</surname> <given-names>P.</given-names></name> <name><surname>Striano</surname> <given-names>T.</given-names></name> <name><surname>Blatt</surname> <given-names>L.</given-names></name></person-group> (<year>2002</year>). <article-title>Differential effects of happy, neutral, and sad still-faces on 2-, 4- and 6-month-old infants</article-title>. <source>Infant Child Dev. Int. J. Res. Pract.</source> <volume>11</volume>, <fpage>289</fpage>&#x2013;<lpage>303</lpage>. doi: <pub-id pub-id-type="doi">10.1002/icd.259</pub-id></citation></ref>
<ref id="ref43"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rodger</surname> <given-names>H.</given-names></name> <name><surname>Vizioli</surname> <given-names>L.</given-names></name> <name><surname>Ouyang</surname> <given-names>X.</given-names></name> <name><surname>Caldara</surname> <given-names>R.</given-names></name></person-group> (<year>2015</year>). <article-title>Mapping the development of facial expression recognition</article-title>. <source>Dev. Sci.</source> <volume>18</volume>, <fpage>926</fpage>&#x2013;<lpage>939</lpage>. doi: <pub-id pub-id-type="doi">10.1111/desc.12281</pub-id></citation></ref>
<ref id="ref44"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rosip</surname> <given-names>J. C.</given-names></name> <name><surname>Hall</surname> <given-names>J. A.</given-names></name></person-group> (<year>2004</year>). <article-title>Knowledge of nonverbal cues, gender, and nonverbal decoding accuracy</article-title>. <source>J. Nonverbal Behav.</source> <volume>28</volume>, <fpage>267</fpage>&#x2013;<lpage>286</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s10919-004-4159-6</pub-id></citation></ref>
<ref id="ref45"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rot</surname> <given-names>M.</given-names></name> <name><surname>Friederici</surname> <given-names>C.</given-names></name> <name><surname>Krause</surname> <given-names>S. C.</given-names></name> <name><surname>de Jong</surname> <given-names>P. J.</given-names></name></person-group> (<year>2022</year>). <article-title>Interpersonal responses to facial expressions of disgust, anger, and happiness in individuals with varying levels of social anxiety</article-title>. <source>PLoS One</source> <volume>17</volume>:<fpage>e0263990</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0263990</pub-id></citation></ref>
<ref id="ref46"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ruffman</surname> <given-names>T.</given-names></name> <name><surname>Kong</surname> <given-names>Q.</given-names></name> <name><surname>Lim</surname> <given-names>H. M.</given-names></name> <name><surname>Du</surname> <given-names>K.</given-names></name> <name><surname>Tiainen</surname> <given-names>E.</given-names></name></person-group> (<year>2023</year>). <article-title>Recognition of facial emotions across the lifespan: 8-year-olds resemble older adults</article-title>. <source>Br. J. Dev. Psychol.</source> <volume>41</volume>, <fpage>128</fpage>&#x2013;<lpage>139</lpage>. doi: <pub-id pub-id-type="doi">10.1111/bjdp.12442</pub-id>, PMID: <pub-id pub-id-type="pmid">36773033</pub-id></citation></ref>
<ref id="ref47"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sasson</surname> <given-names>N. J.</given-names></name> <name><surname>Pinkham</surname> <given-names>A. E.</given-names></name> <name><surname>Richard</surname> <given-names>J.</given-names></name> <name><surname>Hughett</surname> <given-names>P.</given-names></name> <name><surname>Gur</surname> <given-names>R. E.</given-names></name> <name><surname>Gur</surname> <given-names>R. C.</given-names></name></person-group> (<year>2010</year>). <article-title>Controlling for response biases clarifies sex and age differences in facial affect recognition</article-title>. <source>J. Nonverbal Behav.</source> <volume>34</volume>, <fpage>207</fpage>&#x2013;<lpage>221</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s10919-010-0092-z</pub-id></citation></ref>
<ref id="ref48"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>De Sonneville</surname> <given-names>L. M. J.</given-names></name> <name><surname>Verschoor</surname> <given-names>C. A.</given-names></name> <name><surname>Njiokiktjien</surname> <given-names>C.</given-names></name> <name><surname>Op het Veld</surname> <given-names>V.</given-names></name> <name><surname>Toorenaar</surname> <given-names>N.</given-names></name> <name><surname>Vranken</surname> <given-names>M.</given-names></name></person-group> (<year>2002</year>). <article-title>Facial identity and facial emotions: speed, accuracy, and processing strategies in children and adults</article-title>. <source>J. Clin. Exp. Neuropsychol.</source> <volume>24</volume>, <fpage>200</fpage>&#x2013;<lpage>213</lpage>. doi: <pub-id pub-id-type="doi">10.1076/jcen.24.2.200.989</pub-id>, PMID: <pub-id pub-id-type="pmid">11992203</pub-id></citation></ref>
<ref id="ref49"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sou</surname> <given-names>K. L.</given-names></name> <name><surname>Xu</surname> <given-names>H.</given-names></name></person-group> (<year>2019</year>). <article-title>Brief facial emotion aftereffect occurs earlier for anger than happy adaptation</article-title>. <source>Vis. Res.</source> <volume>162</volume>, <fpage>35</fpage>&#x2013;<lpage>42</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.visres.2019.07.002</pub-id>, PMID: <pub-id pub-id-type="pmid">31325461</pub-id></citation></ref>
<ref id="ref50"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Vesker</surname> <given-names>M.</given-names></name> <name><surname>Bahn</surname> <given-names>D.</given-names></name> <name><surname>Deg&#x00E9;</surname> <given-names>F.</given-names></name> <name><surname>Kauschke</surname> <given-names>C.</given-names></name> <name><surname>Schwarzer</surname> <given-names>G.</given-names></name></person-group> (<year>2018</year>). <article-title>Developmental changes in the categorical processing of positive and negative facial expressions</article-title>. <source>PLoS One</source> <volume>13</volume>:<fpage>e0201521</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0201521</pub-id></citation></ref>
<ref id="ref51"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Vicari</surname> <given-names>S.</given-names></name> <name><surname>Reilly</surname> <given-names>J. S.</given-names></name> <name><surname>Pasqualetti</surname> <given-names>P.</given-names></name> <name><surname>Vizzotto</surname> <given-names>A.</given-names></name> <name><surname>Caltagirone</surname> <given-names>C.</given-names></name></person-group> (<year>2000</year>). <article-title>Recognition of facial expressions of emotions in school-age children: the intersection of perceptual and semantic categories</article-title>. <source>Acta Paediatr.</source> <volume>89</volume>, <fpage>836</fpage>&#x2013;<lpage>845</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1651-2227.2000.tb00392.x</pub-id></citation></ref>
<ref id="ref52"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Watling</surname> <given-names>D.</given-names></name> <name><surname>Workman</surname> <given-names>L.</given-names></name> <name><surname>Bourne</surname> <given-names>V. J.</given-names></name></person-group> (<year>2012</year>). <article-title>Emotion lateralisation: developments throughout the lifespan</article-title>. <source>Laterality</source> <volume>17</volume>, <fpage>389</fpage>&#x2013;<lpage>411</lpage>. doi: <pub-id pub-id-type="doi">10.1080/1357650X.2012.682160</pub-id></citation></ref>
<ref id="ref53"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Widen</surname> <given-names>S. C.</given-names></name> <name><surname>Russell</surname> <given-names>J. A.</given-names></name></person-group> (<year>2008</year>). <article-title>Children acquire emotion categories gradually</article-title>. <source>Cogn. Dev.</source> <volume>23</volume>, <fpage>291</fpage>&#x2013;<lpage>312</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cogdev.2008.01.002</pub-id></citation></ref>
<ref id="ref54"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Widen</surname> <given-names>S. C.</given-names></name> <name><surname>Russell</surname> <given-names>J. A.</given-names></name></person-group> (<year>2010a</year>). <article-title>Children&#x2019;s scripts for social emotions: causes and consequences are more central than are facial expressions</article-title>. <source>Br. J. Dev. Psychol.</source> <volume>28</volume>, <fpage>565</fpage>&#x2013;<lpage>581</lpage>. doi: <pub-id pub-id-type="doi">10.1348/026151009X457550</pub-id>, PMID: <pub-id pub-id-type="pmid">20849034</pub-id></citation></ref>
<ref id="ref55"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Widen</surname> <given-names>S. C.</given-names></name> <name><surname>Russell</surname> <given-names>J. A.</given-names></name></person-group> (<year>2010b</year>). <article-title>The &#x201C;disgust face&#x201D; conveys anger to children</article-title>. <source>Emotion</source> <volume>10</volume>, <fpage>455</fpage>&#x2013;<lpage>466</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0019151</pub-id></citation></ref>
<ref id="ref56"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhao</surname> <given-names>K.</given-names></name> <name><surname>Liu</surname> <given-names>M.</given-names></name> <name><surname>Gu</surname> <given-names>J.</given-names></name> <name><surname>Mo</surname> <given-names>F.</given-names></name> <name><surname>Fu</surname> <given-names>X.</given-names></name> <name><surname>Liu</surname> <given-names>C. H.</given-names></name></person-group> (<year>2020</year>). <article-title>The preponderant role of fusiform face area for the facial expression confusion effect: an MEG study</article-title>. <source>Neuroscience</source> <volume>433</volume>, <fpage>42</fpage>&#x2013;<lpage>52</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroscience.2020.03.001</pub-id>, PMID: <pub-id pub-id-type="pmid">32169552</pub-id></citation></ref>
<ref id="ref57"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhao</surname> <given-names>K.</given-names></name> <name><surname>Zhao</surname> <given-names>J.</given-names></name> <name><surname>Zhang</surname> <given-names>M.</given-names></name> <name><surname>Cui</surname> <given-names>Q.</given-names></name> <name><surname>Fu</surname> <given-names>X. L.</given-names></name></person-group> (<year>2017</year>). <article-title>Neural responses to rapid facial expressions of fear and surprise</article-title>. <source>Front. Psychol.</source> <volume>8</volume>:<fpage>761</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2017.00761</pub-id>, PMID: <pub-id pub-id-type="pmid">28539909</pub-id></citation></ref>
</ref-list>
</back>
</article>