<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Neurosci.</journal-id>
<journal-title>Frontiers in Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Neurosci.</abbrev-journal-title>
<issn pub-type="epub">1662-453X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnins.2024.1482849</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Neuroscience</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Eye movement characteristics of emotional face recognizing task in patients with mild to moderate depression</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" equal-contrib="yes">
<name><surname>Yang</surname> <given-names>Qian</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="author-notes" rid="fn0025"><sup>&#x2020;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2805133/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author" equal-contrib="yes">
<name><surname>Fu</surname> <given-names>Yanyan</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="author-notes" rid="fn0025"><sup>&#x2020;</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Yang</surname> <given-names>Qiuli</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Yin</surname> <given-names>Dongqing</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Zhao</surname> <given-names>Yanan</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Wang</surname> <given-names>Hao</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Zhang</surname> <given-names>Han</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Sun</surname> <given-names>Yanran</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Xie</surname> <given-names>Xinyi</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Du</surname> <given-names>Jian</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Institute of Basic Research in Clinical Medicine, China Academy of Chinese Medical Sciences</institution>, <addr-line>Beijing</addr-line>, <country>China</country></aff>
<aff id="aff2"><sup>2</sup><institution>Beijing Anding Hospital, Capital Medical University</institution>, <addr-line>Beijing</addr-line>, <country>China</country></aff>
<aff id="aff3"><sup>3</sup><institution>Institute of Acupuncture and Moxibustion, China Academy of Chinese Medical Sciences</institution>, <addr-line>Beijing</addr-line>, <country>China</country></aff>
<aff id="aff4"><sup>4</sup><institution>Shandong University of Traditional Chinese Medicine</institution>, <addr-line>Jinan, Shandong</addr-line>, <country>China</country></aff>
<aff id="aff5"><sup>5</sup><institution>Jiangxi University of Chinese Medicine</institution>, <addr-line>Nanchang</addr-line>, <country>China</country></aff>
<author-notes>
<fn fn-type="edited-by" id="fn0001"><p>Edited by: Chuanliang Han, The Chinese University of Hong Kong, China</p></fn>
<fn fn-type="edited-by" id="fn0002"><p>Reviewed by: Xunbing Shen, Jiangxi University of Chinese Medicine, China</p><p>Qinghua He, Southwest University, China</p><p>Wenbo Ma, North Sichuan Medical College, China</p></fn>
<corresp id="c001">&#x002A;Correspondence: Jian Du, <email>djtianlai@163.com</email></corresp>
<fn fn-type="equal" id="fn0025"><p><sup>&#x2020;</sup>These authors share first authorship</p></fn>
</author-notes>
<pub-date pub-type="epub">
<day>13</day>
<month>11</month>
<year>2024</year>
</pub-date>
<pub-date pub-type="collection">
<year>2024</year>
</pub-date>
<volume>18</volume>
<elocation-id>1482849</elocation-id>
<history>
<date date-type="received">
<day>18</day>
<month>08</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>28</day>
<month>10</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2024 Yang, Fu, Yang, Yin, Zhao, Wang, Zhang, Sun, Xie and Du.</copyright-statement>
<copyright-year>2024</copyright-year>
<copyright-holder>Yang, Fu, Yang, Yin, Zhao, Wang, Zhang, Sun, Xie and Du</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<sec id="sec1">
<title>Objective</title>
<p>Depression is a complex affective disorder characterized by high prevalence and severe impact, commonly presenting with cognitive impairment. The objective diagnosis of depression lacks precise standards. This study investigates eye movement characteristics during emotional face recognition task (EFRT) in depressive patients to provide empirical support for objective diagnosis.</p>
</sec>
<sec id="sec2">
<title>Methods</title>
<p>We recruited 43 patients with depression (Depressive patients, DP) from a psychiatric hospital and 44 healthy participants (Healthy Control, HC) online. All participants completed an EFRT comprising 120 trials. Each trial presented a gray screen for 800&#x2009;ms followed by a stimulus image for judgment. Emotions were categorized as positive, neutral, or negative. Eye movement trajectories were recorded throughout the task. Latency of First Fixation (LFF), Latency of First Fixation for Eye AOI, and Latency of First Fixation for Mouth AOI were used as representative indicators of early attention; Proportion of Eye AOI and Proportion of Mouth AOI as measures of intermediate attention; and Accuracy (ACC) and Reaction Time (RT) as behavioral indicators of late-stage attention. In this study, these metrics were employed to explore the differences between patients with depression and healthy individuals.</p>
</sec>
<sec id="sec3">
<title>Results</title>
<p>Compared to healthy participants, individuals with depression exhibit longer first fixation latencies on the eyes and mouth during the early attention stage of emotional face recognition, indicating an avoidance tendency toward key facial recognition cues. In the mid-to-late attention stages, depressive individuals show an increased fixation ratio on the eyes and a decreased fixation ratio on the mouth, along with lower accuracy and longer response times. These findings suggest that, relative to healthy individuals, individuals with depression have deficits in facial recognition.</p>
</sec>
<sec id="sec4">
<title>Conclusion</title>
<p>This study identified distinct attention patterns and cognitive deficits in emotional face recognition among individuals with depression compared to healthy individuals, providing an attention-based approach for exploring potential clinical diagnostic markers for depression.</p>
</sec>
</abstract>
<kwd-group>
<kwd>eye movement</kwd>
<kwd>depression</kwd>
<kwd>AOI</kwd>
<kwd>emotional facial expression recognition</kwd>
<kwd>cognitive deficit</kwd>
</kwd-group>
<counts>
<fig-count count="5"/>
<table-count count="3"/>
<equation-count count="0"/>
<ref-count count="36"/>
<page-count count="10"/>
<word-count count="6076"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Translational Neuroscience</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="sec5">
<label>1</label>
<title>Introduction</title>
<p>Depression is a complex affective disorder with significant emotional, cognitive, and physical symptoms, characterized by persistent low mood, reduced activity, and slowed cognitive function. According to the Global Burden of Disease Study, approximately 280 million people worldwide suffer from depression, including 5% of adults and 5.7% of individuals over 60, and a 25% increase has been triggered by the COVID-19 pandemic (<xref ref-type="bibr" rid="ref32">World Health Organization, 2022</xref>). Depression is significantly associated with disability and can lead to severe consequences, including suicide, negatively impacting individuals&#x2019; mental and physical health (<xref ref-type="bibr" rid="ref14">Iancu et al., 2020</xref>; <xref ref-type="bibr" rid="ref21">Morin et al., 2020</xref>).</p>
<p>Most patients with depression experience cognitive dysfunction, including deficits in executive function, attention, memory, and processing speed (<xref ref-type="bibr" rid="ref35">Yan and Li, 2018</xref>). These impairments manifest as reduced cognitive flexibility, decision-making, and inhibitory control, along with difficulties in maintaining attention, short-term memory loss, and slower reaction times (<xref ref-type="bibr" rid="ref13">Hollon et al., 2006</xref>; <xref ref-type="bibr" rid="ref16">Koenig et al., 2014</xref>; <xref ref-type="bibr" rid="ref18">Lee et al., 2012</xref>; <xref ref-type="bibr" rid="ref29">Wagner et al., 2012</xref>). Depression also causes a negative emotional bias, where individuals exhibit a preference for negative stimuli, leading to the misinterpretation of information through a negative lens. This bias is linked to deeply ingrained negative self-schemas that sustain depressive symptoms (<xref ref-type="bibr" rid="ref15">Jiang, 2024</xref>). Studies show depressed individuals have slower reaction times when recognizing facial expressions, particularly neutral ones, and reduced accuracy in identifying positive expressions, often mistaking them for neutral or negative (<xref ref-type="bibr" rid="ref19">Leppanen et al., 2004</xref>; <xref ref-type="bibr" rid="ref25">Sfarlea et al., 2018</xref>). However, they generally retain the ability to recognize sad expressions (<xref ref-type="bibr" rid="ref4">Dalili et al., 2015</xref>).</p>
<p>The clinical diagnosis of depression primarily relies on patients&#x2019; clinical symptoms supplemented by depression-related scale scores, lacking specific physiological and biochemical indicators as auxiliary diagnostic criteria (<xref ref-type="bibr" rid="ref3">Cuijpers et al., 2020</xref>). This absence of a &#x201C;gold standard&#x201D; akin to the diagnosis of other organic diseases has prompted numerous studies in recent years to explore objective diagnostic indicators for depression using physiological signals, biochemical markers, and facial visual features (<xref ref-type="bibr" rid="ref1">Byun et al., 2019</xref>; <xref ref-type="bibr" rid="ref17">Koller-Schlaud et al., 2020</xref>; <xref ref-type="bibr" rid="ref24">Rushia et al., 2020</xref>; <xref ref-type="bibr" rid="ref28">Thoduparambil et al., 2020</xref>; <xref ref-type="bibr" rid="ref33">Xing et al., 2019</xref>; <xref ref-type="bibr" rid="ref5">Du et al., 2022</xref>).</p>
<p>The diagnostic approach based on facial visual features objectively assesses the severity of depression by analyzing relevant information from the patient&#x2019;s face. It further summarizes behavioral characteristics specific to individuals with depression to guide clinical diagnoses made by doctors. The equipment required for this method is simple&#x2014;a camera&#x2014;making it cost-effective and easily accessible. Importantly, subjects do not need direct contact with the equipment during data collection, allowing them to maintain a natural state of mind without any hindrance and ensuring genuine mental state data can be captured. Some scholars believe that this method is particularly beneficial for patients experiencing reduced interest or pleasure due to its user-friendly nature. Consequently, it holds significant research value and potential for development (<xref ref-type="bibr" rid="ref5">Du et al., 2022</xref>).</p>
<p>Recently, there have been significant findings from eye movement experiments conducted on patients with depression. In free-viewing tasks, patients with depression show fewer fixation points and shorter total fixation times on positive images compared to healthy controls. Additionally, the transition time from negative to neutral stimuli was longer in the depression group than in the healthy group (<xref ref-type="bibr" rid="ref23">Qian et al., 2019</xref>). In dot-probe tasks, it was observed that after undergoing positive word training, patients with depression showed a significant reduction in both the number and duration of fixations on the negative portion of pictures when compared to their pre-training performance (<xref ref-type="bibr" rid="ref20">Liu et al., 2015</xref>). In recent years, an increasing number of researchers have integrated machine learning algorithms into modeling various indicators derived from eye movement experiments as well as other cross-modal indicators. The aim is to identify a combination method that yields optimal recognition rates for diagnosing depression (<xref ref-type="bibr" rid="ref22">Pan et al., 2019</xref>; <xref ref-type="bibr" rid="ref26">Shen et al., 2021</xref>; <xref ref-type="bibr" rid="ref31">Wang et al., 2018</xref>).</p>
<p>Therefore, this study focuses specifically on individuals with mild to moderate depression and utilizes eye tracking technology to investigate different eye movement indicators during an emotional face recognition task among depressive patients. A comparison will be made against healthy individuals to enhance support for behavioral experimental data related to objective diagnostic indicators for depression while providing valuable reference data for future research involving emotional face recognition and eye movements among depressive patients.</p>
</sec>
<sec sec-type="materials|methods" id="sec6">
<label>2</label>
<title>Materials and methods</title>
<sec id="sec7">
<label>2.1</label>
<title>Participants</title>
<p>All participants were required to have normal or corrected-to-normal vision, as well as be free from color blindness or color weakness to eliminate any potential impact of visual impairments on the experiment. Participants were aged between 18 and 60, right-handed, and were required to read the experimental instructions, agree to the procedures, and sign an informed consent form.</p>
<p>Depressed patients were recruited from a tertiary psychiatric hospital in Beijing. They were assessed by 2&#x2013;3 psychiatrists, including one senior psychiatrist, using the 17-item Hamilton Depression Rating Scale (HAMD-17), and clinically diagnosed according to the criteria for depression outlined in the <italic>Diagnostic and Statistical Manual of Mental Disorders, Fifth Edition</italic> (DSM-5). Patients scoring &#x003E;7 on the HAMD-17 and clinically diagnosed with depression were included in the depression group, while individuals with other psychiatric disorders such as schizophrenia, anxiety disorders, or bipolar disorder were excluded.</p>
<p>The healthy control group was recruited online via social media platforms and consisted of volunteers. In addition to meeting the vision, age, and handedness requirements, healthy participants were required to score&#x2009;&#x003C;&#x2009;7 on the HAMD-17 and have a total score of &#x003C;160 on the Symptom Checklist-90 (SCL-90).</p>
<p>A total of 45 participants were recruited for the depression group and 51 for the healthy control group. Due to data quality issues, 2 depressed participants and 7 healthy participants were excluded, resulting in a final sample of 87 participants, with 43 in the depression group and 44 in the healthy control group.</p>
<p>This study was approved by the Ethics Committee of the Institute of Clinical Basic Medicine, China Academy of Chinese Medical Sciences, under the approval number P23009/PJ09.</p>
</sec>
<sec id="sec8">
<label>2.2</label>
<title>Experimental design and data collection</title>
<sec id="sec9">
<label>2.2.1</label>
<title>Emotional images</title>
<p>The emotional facial images used in the Emotional Face Recognition Task were sourced from the Chinese Affective Face Picture System (CAFPS), developed by Professor Yuejia Luo&#x2019;s team at Shenzhen University (<xref ref-type="bibr" rid="ref9001">Xu et al., 2011</xref>). All images were standardized in terms of color, brightness, and size and demonstrated good reliability and validity. A total of 120 images were selected from the CAFPS database for the experiment, with 40 images for each emotional category (positive, neutral, negative). The images were matched for gender, with 20 male and 20 female faces in each emotional category.</p>
</sec>
<sec id="sec10">
<label>2.2.2</label>
<title>Experimental instruments and software</title>
<p>The experiment employed a Tobii Pro X3-120 eye tracker produced by Tobii Technology AB, Sweden. The device is 324&#x2009;mm in length, weighs 118&#x2009;g, and operates at a sampling rate of 120&#x2009;Hz. It can be directly mounted on monitors or laptops with screens up to 25 inches using the accompanying adhesive stand (as shown in <xref ref-type="fig" rid="fig1">Figure 1</xref>). During the experiment, participants were seated approximately 65&#x2009;cm from the screen. Fixations were calculated using Tobii&#x2019;s built-in I-VT algorithm, which automatically identified fixations with a duration greater than 60&#x2009;ms. Adjacent fixations separated by less than 75&#x2009;ms and with an angular distance of less than 0.5 degrees were merged into a single fixation point. Eye-tracking data were collected and exported using Tobii Pro Lab Version 1.152. Behavioral data were collected, merged, and exported using E-Prime 3.0. Data preprocessing and statistical analysis were conducted using R Studio version 4.3.2, and data visualization was performed using GraphPad Prism version 10.1.2.</p>
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption>
<p>Processing diagram of the single trial.</p>
</caption>
<graphic xlink:href="fnins-18-1482849-g001.tif"/>
</fig>
</sec>
<sec id="sec11">
<label>2.2.3</label>
<title>Experimental design and procedure</title>
<p>The experiment followed a 2 (group: depression patients/healthy controls)&#x2009;&#x00D7;&#x2009;3 (emotional face type: positive/neutral/negative) mixed factorial design. Participants were required to complete the Emotional Face Recognition Task (EFRT), in which they were asked to identify the emotional attributes of randomly presented facial images. Meanwhile, the eye tracker recorded their eye movement trajectories.</p>
<p>Before the experiment, the eye-tracking equipment was calibrated. Upon arrival at the laboratory, participants were briefed on the task and informed of the relevant precautions. They then performed the Emotional Face Recognition Task. The experiment took place in a soundproof, windowless room, with the main light source being a ceiling light in the center. The computer faced the wall with its back to the light source to ensure soft, non-reflective lighting conditions.</p>
<p>The eye-tracking experiment followed an event-related design with a total of 120 trials presented in random order. The flow of a single trial is illustrated in <xref ref-type="fig" rid="fig1">Figure 1</xref>. Each trial began with a blank gray screen displayed for 800&#x2009;ms, followed by the random presentation of an emotional face image at the center of the monitor. The duration of each image presentation was not fixed and depended on the participant&#x2019;s response. Participants were required to make a judgment by pressing one of three keys: &#x201C;1&#x201D; for positive, &#x201C;2&#x201D; for neutral, and &#x201C;3&#x201D; for negative. Once a selection was made, the trial ended, and the next trial began.</p>
</sec>
</sec>
<sec id="sec12">
<label>2.3</label>
<title>Data preprocessing and eye movement index calculations</title>
<sec id="sec13">
<label>2.3.1</label>
<title>Zoning and labeling of AOI</title>
<p>In Python, the shape keypoint coordinate prediction tool of the DLIB library, together with the standardized 68-point facial feature calibration model, was used to automatically identify the facial feature coordinates of the stimulus images. The locations were recorded by drawing the regions and generating an AOI coordinate table, and the AOI of each fixation was determined by comparing the x and y coordinates of the fixation point with the AOI coordinates: blue denotes the Mouth AOI, red denotes the Face AOI, and all remaining areas are labeled Other (<xref ref-type="fig" rid="fig2">Figure 2</xref>).</p>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption>
<p>The effect diagram of AOI division.</p>
</caption>
<graphic xlink:href="fnins-18-1482849-g002.tif"/>
</fig>
</sec>
<sec id="sec14">
<label>2.3.2</label>
<title>Data screening and imputation</title>
<p>In this study, to clearly differentiate between eye-tracking data and reaction time/accuracy data, we referred to the latter as &#x201C;behavioral data.&#x201D; Eye-tracking data were first stored in Tobii Pro Lab software, and during export, only data with a valid sampling rate of &#x2265;60% were selected. Invalid sampling points during the experiment could result from undetected eye movements or unclassified samples, such as when participants blinked, closed their eyes, or looked away from the screen. During this process, data from two participants in the depression group were excluded.</p>
<p>Both eye-tracking and behavioral data were preprocessed and analyzed using R. Outliers beyond 1.5 times the interquartile range (IQR) were excluded. Missing values were imputed using mean substitution. After data cleaning, the proportions of imputed missing values were as follows: Latency of first fixation (12%), LFF of eye (3%), LFF of mouth (3%), Look proportion of eye (2%), Look proportion of mouth (0%), Accuracy (9%), and Reaction time (5%).</p>
</sec>
</sec>
<sec id="sec15">
<label>2.4</label>
<title>Data analysis</title>
<p>In this study, the analyzed data primarily comprised eye-tracking and behavioral data. Eye-tracking metrics were derived from the raw data, including the latency of the first fixation (LFF) for each trial, the LFF of the eye AOI (area of interest), the LFF of the mouth AOI, the proportion of fixation time on the eye AOI, and the proportion of fixation time on the mouth AOI. Behavioral data included reaction time and accuracy.</p>
<sec id="sec16">
<label>2.4.1</label>
<title>Eye-tracking metrics calculation</title>
<p>First, the timestamps from the raw data were used to record the stimulus start time, end time, and the start and end times of each fixation point. Statistical metrics were then calculated using the following formulas.</p>
<sec id="sec17">
<label>2.4.1.1</label>
<title>Latency of first fixation</title>
<p>LFF-related eye-tracking metrics reflect early visual attention characteristics. By analyzing the LFF in different AOIs, early attention preferences can be inferred.</p>
<sec id="sec18">
<label>2.4.1.1.1</label>
<title>LFF for each trial</title>
<p>Calculated as the time from the stimulus onset (STS, Start Time of Stimulus) to the first fixation onset (STFF, Start Time of First Fixation). This measures the time from when the image appears until the first meaningful fixation occurs. Note that fixations that started before but ended after stimulus onset were excluded to ensure that the first fixation was drawn by the stimulus image.</p>
</sec>
<sec id="sec19">
<label>2.4.1.1.2</label>
<title>LFF for eye AOI</title>
<p>Time from the stimulus onset to the first fixation on the eye AOI.</p>
</sec>
<sec id="sec20">
<label>2.4.1.1.3</label>
<title>LFF for mouth AOI</title>
<p>Time from the stimulus onset to the first fixation on the mouth AOI.</p>
</sec>
</sec>
<sec id="sec21">
<label>2.4.1.2</label>
<title>Proportion of look time</title>
<p>Fixation proportion is calculated by measuring the fixation time in different AOIs. Fixation duration is a common metric for mid-term fixation analysis, but since the stimulus presentation time was not fixed in this study, fixation duration is highly correlated with reaction time, making it less suitable as a statistical metric. Instead, we converted fixation duration to a proportion, allowing a comparison of depression characteristics across different AOIs.</p>
<sec id="sec22">
<label>2.4.1.2.1</label>
<title>Look proportion of eye</title>
<p>First, the fixation duration for each AOI in a single trial was calculated, then summed to get the total fixation duration (total look time). The fixation proportion for each AOI was then calculated by dividing the fixation duration for that AOI by the total fixation duration. Look proportion of eye&#x2009;=&#x2009;Look time of eye AOI/Total look time</p>
</sec>
<sec id="sec23">
<label>2.4.1.2.2</label>
<title>Look proportion of mouth</title>
<p>Similar to the above, Look proportion of mouth&#x2009;=&#x2009;Look time of mouth AOI/Total look time.</p>
</sec>
</sec>
</sec>
<sec id="sec24">
<label>2.4.2</label>
<title>Statistical analysis</title>
<p>For demographic data, age differences between the two groups were analyzed using a <italic>t</italic>-test, and gender differences were compared using a chi-square test. Since age differences were significant, age was controlled as a covariate in the analysis of eye-tracking and behavioral data, using a 2&#x00D7;3 ANCOVA design. <italic>Post-hoc</italic> tests and simple effects analyses were corrected using the Bonferroni method for <italic>p</italic>-values.</p>
</sec>
</sec>
</sec>
<sec sec-type="results" id="sec25">
<label>3</label>
<title>Results</title>
<sec id="sec26">
<label>3.1</label>
<title>Demographic information</title>
<p>Demographic information is presented in <xref ref-type="table" rid="tab1">Table 1</xref>. There was no difference in gender composition between the DP group and the HC group; however, the DP group was significantly older than the HC group, so analysis of covariance was used for the subsequent statistical analysis of the dependent variables, with age controlled as a covariate.</p>
<table-wrap position="float" id="tab1">
<label>Table 1</label>
<caption>
<p>Demographic information for DP and HC.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th rowspan="2"/>
<th align="center" valign="top" colspan="2">Group</th>
<th align="center" valign="top" rowspan="2">&#x03C7;<sup>2</sup>/T</th>
<th align="center" valign="top" rowspan="2"><italic>p</italic>-value</th>
</tr>
<tr>
<th align="center" valign="top">DP (<italic>n</italic>&#x2009;=&#x2009;43)</th>
<th align="center" valign="top">HC (<italic>n</italic>&#x2009;=&#x2009;44)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="middle">Gender (M/F)</td>
<td align="center" valign="middle">(6/20)</td>
<td align="center" valign="middle">(3/14)</td>
<td align="center" valign="middle">&#x03C7;<sup>2</sup> =&#x2009;0.851</td>
<td align="center" valign="middle">0.356</td>
</tr>
<tr>
<td align="left" valign="middle">Age</td>
<td align="center" valign="middle">39.6&#x2009;&#x00B1;&#x2009;13.6</td>
<td align="center" valign="middle">26.8&#x2009;&#x00B1;&#x2009;2.62</td>
<td align="center" valign="middle">T&#x2009;=&#x2009;6.095</td>
<td align="center" valign="middle">&#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="sec27">
<label>3.2</label>
<title>Analysis of covariance for statistical indicators</title>
<p><xref ref-type="table" rid="tab2">Table 2</xref> showed the descriptive statistical table of the values of each dependent variable in the DP group and the HC group under different emotional face conditions, and the summary table of the results of analysis of covariance for all dependent variables was shown in <xref ref-type="table" rid="tab3">Table 3</xref>.</p>
<table-wrap position="float" id="tab2">
<label>Table 2</label>
<caption>
<p>Descriptive statistical analysis.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Types of facial emotions</th>
<th align="center" valign="top" colspan="2">Positive</th>
<th align="center" valign="top" colspan="2">Neutral</th>
<th align="center" valign="top" colspan="2">Negative</th>
</tr>
<tr>
<th align="left" valign="top">Group</th>
<th align="center" valign="top">DP(<italic>N</italic>&#x2009;=&#x2009;43)</th>
<th align="center" valign="top">HC(<italic>N</italic>&#x2009;=&#x2009;44)</th>
<th align="center" valign="top">DP(<italic>N</italic>&#x2009;=&#x2009;43)</th>
<th align="center" valign="top">HC(<italic>N</italic>&#x2009;=&#x2009;44)</th>
<th align="center" valign="top">DP(<italic>N</italic>&#x2009;=&#x2009;43)</th>
<th align="center" valign="top">HC(<italic>N</italic>&#x2009;=&#x2009;44)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Latency of first fixation (s)</td>
<td align="center" valign="top">0.26&#x2009;&#x00B1;&#x2009;0.08</td>
<td align="center" valign="top">0.24&#x2009;&#x00B1;&#x2009;0.06</td>
<td align="center" valign="top">0.27&#x2009;&#x00B1;&#x2009;0.08</td>
<td align="center" valign="top">0.25&#x2009;&#x00B1;&#x2009;0.07</td>
<td align="center" valign="top">0.25&#x2009;&#x00B1;&#x2009;0.07</td>
<td align="center" valign="top">0.24&#x2009;&#x00B1;&#x2009;0.07</td>
</tr>
<tr>
<td align="left" valign="top">Latency of first fixation for eye AOI (s)</td>
<td align="center" valign="top">1.04&#x2009;&#x00B1;&#x2009;0.43</td>
<td align="center" valign="top">0.81&#x2009;&#x00B1;&#x2009;0.42</td>
<td align="center" valign="top">0.82&#x2009;&#x00B1;&#x2009;0.37</td>
<td align="center" valign="top">0.71&#x2009;&#x00B1;&#x2009;0.32</td>
<td align="center" valign="top">0.92&#x2009;&#x00B1;&#x2009;0.42</td>
<td align="center" valign="top">0.71&#x2009;&#x00B1;&#x2009;0.35</td>
</tr>
<tr>
<td align="left" valign="top">Latency of first fixation for mouth AOI (s)</td>
<td align="center" valign="top">1.26&#x2009;&#x00B1;&#x2009;0.57</td>
<td align="center" valign="top">1.05&#x2009;&#x00B1;&#x2009;0.46</td>
<td align="center" valign="top">1.73&#x2009;&#x00B1;&#x2009;0.66</td>
<td align="center" valign="top">1.33&#x2009;&#x00B1;&#x2009;0.55</td>
<td align="center" valign="top">1.48&#x2009;&#x00B1;&#x2009;0.61</td>
<td align="center" valign="top">1.20&#x2009;&#x00B1;&#x2009;0.56</td>
</tr>
<tr>
<td align="left" valign="top">Look proportion of eye AOI</td>
<td align="center" valign="top">0.31&#x2009;&#x00B1;&#x2009;0.11</td>
<td align="center" valign="top">0.31&#x2009;&#x00B1;&#x2009;0.11</td>
<td align="center" valign="top">0.38&#x2009;&#x00B1;&#x2009;0.13</td>
<td align="center" valign="top">0.34&#x2009;&#x00B1;&#x2009;0.12</td>
<td align="center" valign="top">0.34&#x2009;&#x00B1;&#x2009;0.11</td>
<td align="center" valign="top">0.33&#x2009;&#x00B1;&#x2009;0.11</td>
</tr>
<tr>
<td align="left" valign="top">Look proportion of mouth AOI</td>
<td align="center" valign="top">0.22&#x2009;&#x00B1;&#x2009;0.07</td>
<td align="center" valign="top">0.24&#x2009;&#x00B1;&#x2009;0.07</td>
<td align="center" valign="top">0.17&#x2009;&#x00B1;&#x2009;0.06</td>
<td align="center" valign="top">0.20&#x2009;&#x00B1;&#x2009;0.07</td>
<td align="center" valign="top">0.18&#x2009;&#x00B1;&#x2009;0.07</td>
<td align="center" valign="top">0.22&#x2009;&#x00B1;&#x2009;0.06</td>
</tr>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top">0.96&#x2009;&#x00B1;&#x2009;0.06</td>
<td align="center" valign="top">0.97&#x2009;&#x00B1;&#x2009;0.04</td>
<td align="center" valign="top">0.89&#x2009;&#x00B1;&#x2009;0.07</td>
<td align="center" valign="top">0.93&#x2009;&#x00B1;&#x2009;0.07</td>
<td align="center" valign="top">0.91&#x2009;&#x00B1;&#x2009;0.08</td>
<td align="center" valign="top">0.96&#x2009;&#x00B1;&#x2009;0.04</td>
</tr>
<tr>
<td align="left" valign="top">Reaction time(s)</td>
<td align="center" valign="top">1.30&#x2009;&#x00B1;&#x2009;0.45</td>
<td align="center" valign="top">1.03&#x2009;&#x00B1;&#x2009;0.31</td>
<td align="center" valign="top">1.49&#x2009;&#x00B1;&#x2009;0.43</td>
<td align="center" valign="top">1.21&#x2009;&#x00B1;&#x2009;0.36</td>
<td align="center" valign="top">1.44&#x2009;&#x00B1;&#x2009;0.43</td>
<td align="center" valign="top">1.07&#x2009;&#x00B1;&#x2009;0.27</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap position="float" id="tab3">
<label>Table 3</label>
<caption>
<p>Covariance analysis table.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th/>
<th align="center" valign="top" colspan="3">Main effect of group</th>
<th align="center" valign="top" colspan="3">Main effect of type of emotional images</th>
<th align="center" valign="top" colspan="3">Main effect of age</th>
<th align="center" valign="top" colspan="3">Interaction effect of group and type of emotional images</th>
</tr>
<tr>
<th/>
<th align="center" valign="top"><italic>F</italic>(1,85)</th>
<th align="center" valign="top"><italic>P</italic></th>
<th align="center" valign="top">&#x03B7;<sup>2</sup>p</th>
<th align="center" valign="top"><italic>F</italic>(2,85)</th>
<th align="center" valign="top"><italic>P</italic></th>
<th align="center" valign="top">&#x03B7;<sup>2</sup>p</th>
<th align="center" valign="top"><italic>F</italic>(1,85)</th>
<th align="center" valign="top"><italic>P</italic></th>
<th align="center" valign="top">&#x03B7;<sup>2</sup>p</th>
<th align="center" valign="top"><italic>F</italic>(2,85)</th>
<th align="center" valign="top"><italic>P</italic></th>
<th align="center" valign="top">&#x03B7;<sup>2</sup>p</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Latency of first fixation</td>
<td align="center" valign="middle">1.599</td>
<td align="center" valign="middle">0.207</td>
<td align="center" valign="middle">0.012</td>
<td align="center" valign="middle">0.112</td>
<td align="center" valign="middle">0.894</td>
<td align="center" valign="middle">0.001</td>
<td align="center" valign="middle">1.675</td>
<td align="center" valign="middle">0.197</td>
<td align="center" valign="middle">0.007</td>
<td align="center" valign="middle">0.444</td>
<td align="center" valign="middle">0.642</td>
<td align="center" valign="middle">0.003</td>
</tr>
<tr>
<td align="left" valign="top">Latency of first fixation for eye AOI</td>
<td align="center" valign="middle">17.519</td>
<td align="center" valign="middle">0.000</td>
<td align="center" valign="middle">0.016</td>
<td align="center" valign="middle">2.411</td>
<td align="center" valign="middle">0.092</td>
<td align="center" valign="middle">0.019</td>
<td align="center" valign="middle">6.920</td>
<td align="center" valign="middle">0.009</td>
<td align="center" valign="middle">0.027</td>
<td align="center" valign="middle">0.105</td>
<td align="center" valign="middle">0.901</td>
<td align="center" valign="middle">0.001</td>
</tr>
<tr>
<td align="left" valign="top">Latency of first fixation for mouth AOI</td>
<td align="center" valign="middle">21.329</td>
<td align="center" valign="middle">0.000</td>
<td align="center" valign="middle">0.027</td>
<td align="center" valign="middle">11.261</td>
<td align="center" valign="middle">0.000</td>
<td align="center" valign="middle">0.081</td>
<td align="center" valign="middle">4.687</td>
<td align="center" valign="middle">0.031</td>
<td align="center" valign="middle">0.018</td>
<td align="center" valign="middle">1.607</td>
<td align="center" valign="middle">0.203</td>
<td align="center" valign="middle">0.012</td>
</tr>
<tr>
<td align="left" valign="top">Look proportion of eye AOI</td>
<td align="center" valign="middle">4.819</td>
<td align="center" valign="middle">0.029</td>
<td align="center" valign="middle">0.048</td>
<td align="center" valign="middle">4.029</td>
<td align="center" valign="middle">0.019</td>
<td align="center" valign="middle">0.031</td>
<td align="center" valign="middle">10.075</td>
<td align="center" valign="middle">0.002</td>
<td align="center" valign="middle">0.038</td>
<td align="center" valign="middle">1.373</td>
<td align="center" valign="middle">0.255</td>
<td align="center" valign="middle">0.011</td>
</tr>
<tr>
<td align="left" valign="top">Look proportion of mouth AOI</td>
<td align="center" valign="middle">11.791</td>
<td align="center" valign="middle">0.001</td>
<td align="center" valign="middle">0.033</td>
<td align="center" valign="middle">8.822</td>
<td align="center" valign="middle">0.000</td>
<td align="center" valign="middle">0.065</td>
<td align="center" valign="middle">1.034</td>
<td align="center" valign="middle">0.869</td>
<td align="center" valign="middle">0.000</td>
<td align="center" valign="middle">1.034</td>
<td align="center" valign="middle">0.357</td>
<td align="center" valign="middle">0.008</td>
</tr>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="middle">27.654</td>
<td align="center" valign="middle">0.000</td>
<td align="center" valign="middle">0.036</td>
<td align="center" valign="middle">5.286</td>
<td align="center" valign="middle">0.000</td>
<td align="center" valign="middle">0.062</td>
<td align="center" valign="middle">0.745</td>
<td align="center" valign="middle">0.022</td>
<td align="center" valign="middle">0.020</td>
<td align="center" valign="middle">0.745</td>
<td align="center" valign="middle">0.476</td>
<td align="center" valign="middle">0.006</td>
</tr>
<tr>
<td align="left" valign="top">Reaction time</td>
<td align="center" valign="middle">49.430</td>
<td align="center" valign="middle">0.000</td>
<td align="center" valign="middle">0.060</td>
<td align="center" valign="middle">10.539</td>
<td align="center" valign="middle">0.001</td>
<td align="center" valign="middle">0.055</td>
<td align="center" valign="middle">1.136</td>
<td align="center" valign="middle">0.001</td>
<td align="center" valign="middle">0.040</td>
<td align="center" valign="middle">1.136</td>
<td align="center" valign="middle">0.323</td>
<td align="center" valign="middle">0.009</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The main effect of GROUP was not significant in the 2&#x2009;&#x00D7;&#x2009;3 ANCOVA with the LFF as the dependent variable, but it was significant in the ANCOVA with the LFF of Eye AOI [<italic>F</italic>(1, 85)&#x2009;=&#x2009;17.519, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, &#x03B7;<sup>2</sup>p&#x2009;=&#x2009;0.016] and LFF of Mouth AOI [<italic>F</italic>(1, 85)&#x2009;=&#x2009;21.329, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, &#x03B7;<sup>2</sup>p&#x2009;=&#x2009;0.027]. The first fixation latency of the Eye AOI and Mouth AOI in the DP group was significantly longer than that in the HC group.</p>
<p>In the ANCOVA analysis of look proportion, the main effects of GROUP were significant both in Eye AOI [<italic>F</italic>(1, 85)&#x2009;=&#x2009;4.819, <italic>p</italic>&#x2009;=&#x2009;0.029, &#x03B7;<sup>2</sup>p&#x2009;=&#x2009;0.048] and Mouth AOI [<italic>F</italic>(1, 85)&#x2009;=&#x2009;11.791, <italic>p</italic>&#x2009;=&#x2009;0.001, &#x03B7;<sup>2</sup>p&#x2009;=&#x2009;0.033]; the <italic>post-hoc</italic> test showed that the look proportion of the Mouth AOI in the DP group was significantly lower than that in the HC group.</p>
<p>The main effect of GROUP was also significant in the analysis of Accuracy [<italic>F</italic>(1, 85)&#x2009;=&#x2009;27.654, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, &#x03B7;<sup>2</sup>p&#x2009;=&#x2009;0.036] and Reaction Time [<italic>F</italic>(1, 85)&#x2009;=&#x2009;49.430, <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001, &#x03B7;<sup>2</sup>p&#x2009;=&#x2009;0.060]; the <italic>post-hoc</italic> test showed that the accuracy of the EFRT in the DP group was significantly lower than that in the HC group, and the reaction time in the DP group was significantly longer than that in the HC group.</p>
</sec>
</sec>
<sec sec-type="discussion" id="sec28">
<label>4</label>
<title>Discussion</title>
<sec id="sec29">
<label>4.1</label>
<title>Avoidance of recognition cues in early attention</title>
<p>We found some interesting results on the latency of first fixation. For the whole picture (<xref ref-type="fig" rid="fig3">Figure 3a</xref>), there was no difference between the DP group and the HC group, but there were some differences when the latency of the first fixation in the different regions of interest was analyzed, people with depression pay attention to the pictures at the same time as healthy people, but they pay attention to the features later than healthy people.</p>
<p>Previous studies have shown that most of the implementation of emotional face recognition is focused on the eyes and mouth (<xref ref-type="bibr" rid="ref6">Eisenbarth and Alpers, 2011</xref>). The eye is a major cue for recognizing negative emotions (<xref ref-type="bibr" rid="ref8">Franca et al., 2023</xref>; <xref ref-type="bibr" rid="ref10">Grainger and Henry, 2020</xref>). The latency of the first fixation on the eye was greater in the DP group than in the HC group, suggesting that depressed patients are slower to notice the eye region when recognizing expressions. As can be seen from <xref ref-type="fig" rid="fig3">Figure 3b</xref>, especially when judging positive and negative faces, the latency of the first fixation point on the &#x201C;Eyes&#x201D; was significantly greater than that of the healthy group. This suggests that people with depression show significantly more avoidance of the eye region when recognizing faces with distinct facial expressions than healthy people. This is consistent with findings from a study on the effects of depressive tendencies on eye gaze in social interactions. The study found that while depressive tendencies do not affect attention to others&#x2019; faces as a whole, they do influence attention to others&#x2019; eyes, suggesting an avoidance of the eye region among individuals with depressive tendencies (<xref ref-type="bibr" rid="ref27">Suslow et al., 2024</xref>). Furthermore, a comparison within the depression group revealed that the first fixation latency on the eye region was significantly longer for positive emotional faces than for neutral images. This suggests a stronger avoidance of the eye region in response to positive emotional faces among individuals with depression. It may also be due to the role of the eyes as a primary cue for recognizing negative emotions, which could counterbalance avoidance tendencies in response to negative faces, thus making avoidance less prominent for positive faces. 
Similarly, the first fixation latency on the mouth was longer in the depression group than in the healthy control group, indicating a slower attention shift to the mouth region when identifying emotional faces compared to healthy individuals. As shown in <xref ref-type="fig" rid="fig3">Figure 3c</xref>, this delay is particularly evident in responses to neutral and negative expressions, which may be attributed to the mouth&#x2019;s role as a key indicator of positive emotional expressions (<xref ref-type="bibr" rid="ref10">Grainger and Henry, 2020</xref>). When judging positive emotional faces, people typically fixate on the mouth first, which might explain why differences in first fixation latency on the mouth between the depression and control groups are less pronounced for positive faces but more noticeable in neutral and negative conditions (<xref ref-type="fig" rid="fig4">Figures 4</xref>, <xref ref-type="fig" rid="fig5">5</xref>).</p>
<fig position="float" id="fig3">
<label>Figure 3</label>
<caption>
<p>(a&#x2013;c) Latency of first fixation in EFRT(s). (d,e) Look proportion of AOIs (eye and mouth) in EFRT. (f,g) Accuracy and reaction time(s) of EFRT. &#x002A;&#x002A;&#x002A;<italic>p</italic>&#x2009;&#x003C;&#x2009;0.001; &#x002A;&#x002A;<italic>p</italic>&#x2009;&#x003C;&#x2009;0.01; &#x002A;<italic>p</italic>&#x2009;&#x003C;&#x2009;0.05; ns, non-significant.</p>
</caption>
<graphic xlink:href="fnins-18-1482849-g003.tif"/>
</fig>
<fig position="float" id="fig4">
<label>Figure 4</label>
<caption>
<p>Fixation point heat map. Larger size of the fixation point represents longer fixation time, and the higher the color heat, the higher the density.</p>
</caption>
<graphic xlink:href="fnins-18-1482849-g004.tif"/>
</fig>
<fig position="float" id="fig5">
<label>Figure 5</label>
<caption>
<p>Eye trace images of DP and HC groups. The number in circle represents the order of fixation points, the size of the circle represents the duration of fixation, and the larger the circle, the longer the duration.</p>
</caption>
<graphic xlink:href="fnins-18-1482849-g005.tif"/>
</fig>
<p>The relevant LFF indicators reveal that, in the early attention stage, individuals with depression do not show a significant difference in response to stimulus images compared to healthy controls. However, they exhibit a certain delay in focusing on effective facial expression cues. This phenomenon reflects, to some extent, a slowing in attention direction and a reduction in efficiency for emotion recognition tasks among individuals with depression.</p>
</sec>
<sec id="sec30">
<label>4.2</label>
<title>Altered processing patterns in mid-to-late attention stages and facial recognition deficits</title>
<p>The look proportion is derived from fixation duration, which reflects the difficulty participants experience in completing the task&#x2014;the longer the duration, the greater the difficulty (<xref ref-type="bibr" rid="ref9">Goller et al., 2019</xref>). Some discrepancies exist between prior studies and the findings of this study. Most studies suggest that individuals experiencing sadness tend to focus less on the eyes during facial recognition (<xref ref-type="bibr" rid="ref12">Hills and Lewis, 2011</xref>), and individuals with depression engage in less eye contact during conversations (<xref ref-type="bibr" rid="ref7">Fiquer et al., 2018</xref>). In social interactions, eye contact represents crucial information in the dialogue, conveying social interest and closeness (<xref ref-type="bibr" rid="ref2">Cui et al., 2019</xref>). By diverting their gaze from others&#x2019; eyes, individuals with depression may avoid deeper social engagement (<xref ref-type="bibr" rid="ref11">Hames et al., 2013</xref>). The observed differences may be attributed to the specificity of this experimental task. Studies that use look proportion as a measure typically employ a free-viewing paradigm, where participants have ample time to view images. However, in this study, participants were required to respond to stimulus images as quickly as possible, with the images disappearing immediately after the response. Since facial expression recognition generally begins with the eyes and then moves to the mouth (<xref ref-type="bibr" rid="ref34">Xue and Yantao, 2007</xref>), it is possible that limited viewing time in this study resulted in a focus advantage for the eyes as the first facial feature noticed. Given that both the depressed and healthy groups were under the same task conditions, a more plausible explanation may be an altered facial recognition pattern among individuals with depression.</p>
<p>In this study, <xref ref-type="fig" rid="fig3">Figures 3d</xref>,<xref ref-type="fig" rid="fig3">e</xref> reveal that individuals with depression show a higher look proportion on the eyes but a significantly lower look proportion on the mouth compared to the healthy control group. This suggests that during the task, participants in the depression group attempt to gather cues more from the eyes and less from the mouth. Research on facial expression recognition patterns in healthy individuals indicates a general preference to seek emotional cues from the mouth, with the eyes providing comparatively less prominent cues (<xref ref-type="bibr" rid="ref30">Wang et al., 2011</xref>). Thus, these findings may suggest that individuals with depression have an altered facial expression recognition pattern compared to healthy individuals. When considering response time and accuracy, the accuracy rate in the depression group was lower than that in the healthy control group, while response times were longer. This indicates that individuals with depression are not only slower in recognizing emotional faces but also less accurate. These results imply that a facial emotion recognition pattern focused more heavily on the eyes is less effective for individuals with depression than for healthy individuals, highlighting deficits in emotional face recognition among those with depression compared to healthy controls.</p>
</sec>
</sec>
<sec sec-type="conclusions" id="sec31">
<label>5</label>
<title>Conclusion</title>
<p>Individuals with depression display certain disadvantages and deficits in attention characteristics during emotional face recognition tasks compared to healthy individuals. Specifically, depressive participants exhibit increased first fixation latency to key facial cue areas (eyes and mouth), indicating an avoidance of effective cues in facial recognition. They also show a decreased look proportion on the mouth and an increased look proportion on the eyes, accompanied by reduced accuracy and prolonged response times. These findings suggest changes in cognitive patterns and deficits in facial recognition during emotional processing in individuals with depression. The results of this study support the hypothesis that individuals with depression have cognitive impairments and facial recognition deficits, offering an attention-based approach to exploring clinical diagnostic markers for depression.</p>
</sec>
<sec id="sec32">
<label>6</label>
<title>Limitations and future directions</title>
<p>In recruiting healthy controls and screening them using scales, the healthy participants&#x2019; ages were relatively concentrated, creating a disparity with the experimental group. Future research should increase the sample size to match the ages of the healthy control group more closely with the experimental group. What is more important is that the conclusions of this study need further validation in future research.</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="sec33">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec sec-type="ethics-statement" id="sec34">
<title>Ethics statement</title>
<p>The studies involving humans were approved by the Ethics Committee of the Institute of Clinical Basic Medicine, China Academy of Chinese Medical Sciences. The studies were conducted in accordance with the local legislation and institutional requirements. The participants provided their written informed consent to participate in this study. Written informed consent was obtained from the individual(s) for the publication of any potentially identifiable images or data included in this article.</p>
</sec>
<sec sec-type="author-contributions" id="sec35">
<title>Author contributions</title>
<p>QiaY: Conceptualization, Data curation, Formal analysis, Investigation, Methodology, Project administration, Validation, Visualization, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing. YF: Conceptualization, Data curation, Formal analysis, Investigation, Methodology, Project administration, Validation, Visualization, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing. QiuY: Conceptualization, Funding acquisition, Resources, Supervision, Writing &#x2013; review &#x0026; editing. DY: Conceptualization, Data curation, Investigation, Methodology, Project administration, Resources, Supervision, Validation, Writing &#x2013; review &#x0026; editing. YZ: Conceptualization, Resources, Supervision, Writing &#x2013; review &#x0026; editing. HW: Funding acquisition, Supervision, Writing &#x2013; review &#x0026; editing. HZ: Investigation, Writing &#x2013; review &#x0026; editing, Validation. YS: Investigation, Writing &#x2013; review &#x0026; editing. XX: Investigation, Writing &#x2013; review &#x0026; editing. JD: Conceptualization, Funding acquisition, Investigation, Methodology, Project administration, Resources, Supervision, Validation, Visualization, Writing &#x2013; review &#x0026; editing.</p>
</sec>
<sec sec-type="funding-information" id="sec36">
<title>Funding</title>
<p>The author(s) declare that financial support was received for the research, authorship, and/or publication of this article. This work was supported by the Beijing Natural Science Foundation (No. 7232327), Study on the Characteristics of NIR Brain Imaging in Depressive Patients with Deficiency and Excess TCM Syndrome after Resting-state and Emotional Activation, and the Excellent Young Talent Cultivation Program of China Academy of Chinese Medicine (No. ZZ16-YQ-059).</p>
</sec>
<sec sec-type="COI-statement" id="sec37">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
<p>The reviewer XS declared a shared affiliation with the author YS to the handling editor at the time of review.</p>
</sec>
<sec sec-type="disclaimer" id="sec38">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="ref1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Byun</surname> <given-names>S.</given-names></name> <name><surname>Kim</surname> <given-names>A. Y.</given-names></name> <name><surname>Jang</surname> <given-names>E. H.</given-names></name> <name><surname>Kim</surname> <given-names>S.</given-names></name> <name><surname>Choi</surname> <given-names>K. W.</given-names></name> <name><surname>Yu</surname> <given-names>H. Y.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Detection of major depressive disorder from linear and nonlinear heart rate variability features during mental task protocol</article-title>. <source>Comput. Biol. Med.</source> <volume>112</volume>:<fpage>103381</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.compbiomed.2019.103381</pub-id>, PMID: <pub-id pub-id-type="pmid">31404718</pub-id></citation></ref>
<ref id="ref2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cui</surname> <given-names>M.</given-names></name> <name><surname>Zhu</surname> <given-names>M.</given-names></name> <name><surname>Lu</surname> <given-names>X.</given-names></name> <name><surname>Zhu</surname> <given-names>L.</given-names></name></person-group> (<year>2019</year>). <article-title>Implicit perceptions of closeness from the direct eye gaze</article-title>. <source>Front. Psychol.</source> <volume>9</volume>:<fpage>2673</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2018.02673</pub-id>, PMID: <pub-id pub-id-type="pmid">30666227</pub-id></citation></ref>
<ref id="ref3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cuijpers</surname> <given-names>P.</given-names></name> <name><surname>Noma</surname> <given-names>H.</given-names></name> <name><surname>Karyotaki</surname> <given-names>E.</given-names></name> <name><surname>Vinkers</surname> <given-names>C. H.</given-names></name> <name><surname>Cipriani</surname> <given-names>A.</given-names></name> <name><surname>Furukawa</surname> <given-names>T. A.</given-names></name></person-group> (<year>2020</year>). <article-title>A network meta-analysis of the effects of psychotherapies, pharmacotherapies and their combination in the treatment of adult depression</article-title>. <source>World Psychiatry</source> <volume>19</volume>, <fpage>92</fpage>&#x2013;<lpage>107</lpage>. doi: <pub-id pub-id-type="doi">10.1002/wps.20701</pub-id>, PMID: <pub-id pub-id-type="pmid">31922679</pub-id></citation></ref>
<ref id="ref4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dalili</surname> <given-names>M. N.</given-names></name> <name><surname>Penton-Voak</surname> <given-names>I. S.</given-names></name> <name><surname>Harmer</surname> <given-names>C. J.</given-names></name> <name><surname>Munafo</surname> <given-names>M. R.</given-names></name></person-group> (<year>2015</year>). <article-title>Meta-analysis of emotion recognition deficits in major depressive disorder</article-title>. <source>Psychol. Med.</source> <volume>45</volume>, <fpage>1135</fpage>&#x2013;<lpage>1144</lpage>. doi: <pub-id pub-id-type="doi">10.1017/S0033291714002591</pub-id>, PMID: <pub-id pub-id-type="pmid">25395075</pub-id></citation></ref>
<ref id="ref5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Du</surname> <given-names>M.</given-names></name> <name><surname>Shuang</surname> <given-names>L.</given-names></name> <name><surname>Xiao-ya</surname> <given-names>L.</given-names></name> <name><surname>Wen-quan</surname> <given-names>Z.</given-names></name> <name><surname>Dong</surname> <given-names>M.</given-names></name></person-group> (<year>2022</year>). <article-title>Research progress of facial visual features in depression diagnosis</article-title>. <source>J. Chin. Comput. Syst.</source> <volume>43</volume>, <fpage>483</fpage>&#x2013;<lpage>489</lpage>. doi: <pub-id pub-id-type="doi">10.20009/j.cnki.21-1106/TP.2021-0545</pub-id></citation></ref>
<ref id="ref6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Eisenbarth</surname> <given-names>H.</given-names></name> <name><surname>Alpers</surname> <given-names>G. W.</given-names></name></person-group> (<year>2011</year>). <article-title>Happy mouth and sad eyes: scanning emotional facial expressions</article-title>. <source>Emotion</source> <volume>11</volume>, <fpage>860</fpage>&#x2013;<lpage>865</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0022758</pub-id>, PMID: <pub-id pub-id-type="pmid">21859204</pub-id></citation></ref>
<ref id="ref7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fiquer</surname> <given-names>J. T.</given-names></name> <name><surname>Moreno</surname> <given-names>R. A.</given-names></name> <name><surname>Brunoni</surname> <given-names>A. R.</given-names></name> <name><surname>Barros</surname> <given-names>V. B.</given-names></name> <name><surname>Fernandes</surname> <given-names>F.</given-names></name> <name><surname>Gorenstein</surname> <given-names>C.</given-names></name></person-group> (<year>2018</year>). <article-title>What is the nonverbal communication of depression? Assessing expressive differences between depressive patients and healthy volunteers during clinical interviews</article-title>. <source>J. Affect. Disord.</source> <volume>238</volume>, <fpage>636</fpage>&#x2013;<lpage>644</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jad.2018.05.071</pub-id>, PMID: <pub-id pub-id-type="pmid">29957481</pub-id></citation></ref>
<ref id="ref8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Franca</surname> <given-names>M.</given-names></name> <name><surname>Bolognini</surname> <given-names>N.</given-names></name> <name><surname>Brysbaert</surname> <given-names>M.</given-names></name></person-group> (<year>2023</year>). <article-title>Seeing emotions in the eyes: a validated test to study individual differences in the perception of basic emotions</article-title>. <source>Cogn. Res. Princ. Impl.</source> <volume>8</volume>:<fpage>67</fpage>. doi: <pub-id pub-id-type="doi">10.1186/s41235-023-00521-x</pub-id>, PMID: <pub-id pub-id-type="pmid">37919608</pub-id></citation></ref>
<ref id="ref9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Goller</surname> <given-names>J.</given-names></name> <name><surname>Mitrovic</surname> <given-names>A.</given-names></name> <name><surname>Leder</surname> <given-names>H.</given-names></name></person-group> (<year>2019</year>). <article-title>Effects of liking on visual attention in faces and paintings</article-title>. <source>Acta Psychol.</source> <volume>197</volume>, <fpage>115</fpage>&#x2013;<lpage>123</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.actpsy.2019.05.008</pub-id>, PMID: <pub-id pub-id-type="pmid">31146088</pub-id></citation></ref>
<ref id="ref10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Grainger</surname> <given-names>S. A.</given-names></name> <name><surname>Henry</surname> <given-names>J. D.</given-names></name></person-group> (<year>2020</year>). <article-title>Gaze patterns to emotional faces throughout the adult lifespan</article-title>. <source>Psychol. Aging</source> <volume>35</volume>, <fpage>981</fpage>&#x2013;<lpage>992</lpage>. doi: <pub-id pub-id-type="doi">10.1037/pag0000571</pub-id>, PMID: <pub-id pub-id-type="pmid">32816505</pub-id></citation></ref>
<ref id="ref11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hames</surname> <given-names>J. L.</given-names></name> <name><surname>Hagan</surname> <given-names>C. R.</given-names></name> <name><surname>Joiner</surname> <given-names>T. E.</given-names></name></person-group> (<year>2013</year>). <article-title>Interpersonal processes in depression</article-title>. <source>Annu. Rev. Clin. Psychol.</source> <volume>9</volume>, <fpage>355</fpage>&#x2013;<lpage>377</lpage>. doi: <pub-id pub-id-type="doi">10.1146/annurev-clinpsy-050212-185553</pub-id></citation></ref>
<ref id="ref12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hills</surname> <given-names>P. J.</given-names></name> <name><surname>Lewis</surname> <given-names>M. B.</given-names></name></person-group> (<year>2011</year>). <article-title>Sad people avoid the eyes or happy people focus on the eyes? Mood induction affects facial feature discrimination</article-title>. <source>Br. J. Psychol.</source> <volume>102</volume>, <fpage>260</fpage>&#x2013;<lpage>274</lpage>. doi: <pub-id pub-id-type="doi">10.1348/000712610X519314</pub-id></citation></ref>
<ref id="ref13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hollon</surname> <given-names>S. D.</given-names></name> <name><surname>Shelton</surname> <given-names>R. C.</given-names></name> <name><surname>Wisniewski</surname> <given-names>S.</given-names></name> <name><surname>Warden</surname> <given-names>D.</given-names></name> <name><surname>Biggs</surname> <given-names>M. M.</given-names></name> <name><surname>Friedman</surname> <given-names>E. S.</given-names></name> <etal/></person-group>. (<year>2006</year>). <article-title>Presenting characteristics of depressed outpatients as a function of recurrence: preliminary findings from the STAR&#x002A;D clinical trial</article-title>. <source>J. Psychiatr. Res.</source> <volume>40</volume>, <fpage>59</fpage>&#x2013;<lpage>69</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jpsychires.2005.07.008</pub-id>, PMID: <pub-id pub-id-type="pmid">16243357</pub-id></citation></ref>
<ref id="ref14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Iancu</surname> <given-names>S. C.</given-names></name> <name><surname>Wong</surname> <given-names>Y. M.</given-names></name> <name><surname>Rhebergen</surname> <given-names>D.</given-names></name> <name><surname>van Balkom</surname> <given-names>A.</given-names></name> <name><surname>Batelaan</surname> <given-names>N. M.</given-names></name></person-group> (<year>2020</year>). <article-title>Long-term disability in major depressive disorder: a 6-year follow-up study</article-title>. <source>Psychol. Med.</source> <volume>50</volume>, <fpage>1644</fpage>&#x2013;<lpage>1652</lpage>. doi: <pub-id pub-id-type="doi">10.1017/S0033291719001612</pub-id>, PMID: <pub-id pub-id-type="pmid">31284881</pub-id></citation></ref>
<ref id="ref15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jiang</surname> <given-names>Y.</given-names></name></person-group> (<year>2024</year>). <article-title>A theory of the neural mechanisms underlying negative cognitive bias in major depression</article-title>. <source>Front. Psych.</source> <volume>15</volume>:<fpage>1348474</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyt.2024.1348474</pub-id>, PMID: <pub-id pub-id-type="pmid">38532986</pub-id></citation></ref>
<ref id="ref16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Koenig</surname> <given-names>A. M.</given-names></name> <name><surname>Bhalla</surname> <given-names>R. K.</given-names></name> <name><surname>Butters</surname> <given-names>M. A.</given-names></name></person-group> (<year>2014</year>). <article-title>Cognitive functioning and late-life depression</article-title>. <source>J. Int. Neuropsychol. Soc.</source> <volume>20</volume>, <fpage>461</fpage>&#x2013;<lpage>467</lpage>. doi: <pub-id pub-id-type="doi">10.1017/S1355617714000198</pub-id>, PMID: <pub-id pub-id-type="pmid">24685173</pub-id></citation></ref>
<ref id="ref17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Koller-Schlaud</surname> <given-names>K.</given-names></name> <name><surname>Str&#x00F6;hle</surname> <given-names>A.</given-names></name> <name><surname>B&#x00E4;rwolf</surname> <given-names>E.</given-names></name> <name><surname>Behr</surname> <given-names>J.</given-names></name> <name><surname>Rentzsch</surname> <given-names>J.</given-names></name></person-group> (<year>2020</year>). <article-title>EEG frontal asymmetry and Theta power in unipolar and bipolar depression</article-title>. <source>J. Affect. Disord.</source> <volume>276</volume>, <fpage>501</fpage>&#x2013;<lpage>510</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jad.2020.07.011</pub-id>, PMID: <pub-id pub-id-type="pmid">32871681</pub-id></citation></ref>
<ref id="ref18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lee</surname> <given-names>R. S.</given-names></name> <name><surname>Hermens</surname> <given-names>D. F.</given-names></name> <name><surname>Porter</surname> <given-names>M. A.</given-names></name> <name><surname>Redoblado-Hodge</surname> <given-names>M. A.</given-names></name></person-group> (<year>2012</year>). <article-title>A meta-analysis of cognitive deficits in first-episode major depressive disorder</article-title>. <source>J. Affect. Disord.</source> <volume>140</volume>, <fpage>113</fpage>&#x2013;<lpage>124</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jad.2011.10.023</pub-id>, PMID: <pub-id pub-id-type="pmid">22088608</pub-id></citation></ref>
<ref id="ref19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lepp&#x00E4;nen</surname> <given-names>J. M.</given-names></name> <name><surname>Milders</surname> <given-names>M.</given-names></name> <name><surname>Bell</surname> <given-names>J. S.</given-names></name> <name><surname>Terriere</surname> <given-names>E.</given-names></name> <name><surname>Hietanen</surname> <given-names>J. K.</given-names></name></person-group> (<year>2004</year>). <article-title>Depression biases the recognition of emotionally neutral faces</article-title>. <source>Psychiatry Res.</source> <volume>128</volume>, <fpage>123</fpage>&#x2013;<lpage>133</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.psychres.2004.05.020</pub-id>, PMID: <pub-id pub-id-type="pmid">15488955</pub-id></citation></ref>
<ref id="ref20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>M. F.</given-names></name> <name><surname>Huang</surname> <given-names>R. Z.</given-names></name> <name><surname>Xu</surname> <given-names>X. L.</given-names></name> <name><surname>Liu</surname> <given-names>Q. S.</given-names></name></person-group>. (<year>2015</year>). <article-title>Experimental manipulation of positive attention bias in remitted depression: evidence from eye movements</article-title>. <source>Chinese Journal of Clinical Psychology</source> <volume>1</volume>, <fpage>48</fpage>&#x2013;<lpage>51</lpage>.</citation></ref>
<ref id="ref21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Morin</surname> <given-names>R. T.</given-names></name> <name><surname>Nelson</surname> <given-names>C.</given-names></name> <name><surname>Bickford</surname> <given-names>D.</given-names></name> <name><surname>Insel</surname> <given-names>P. S.</given-names></name> <name><surname>Mackin</surname> <given-names>R. S.</given-names></name></person-group> (<year>2020</year>). <article-title>Somatic and anxiety symptoms of depression are associated with disability in late life depression</article-title>. <source>Aging Ment. Health</source> <volume>24</volume>, <fpage>1225</fpage>&#x2013;<lpage>1228</lpage>. doi: <pub-id pub-id-type="doi">10.1080/13607863.2019.1597013</pub-id>, PMID: <pub-id pub-id-type="pmid">30945553</pub-id></citation></ref>
<ref id="ref22"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Pan</surname> <given-names>Z.</given-names></name> <name><surname>Ma</surname> <given-names>H.</given-names></name> <name><surname>Zhang</surname> <given-names>L.</given-names></name> <name><surname>Wang</surname> <given-names>Y.</given-names></name></person-group> (<year>2019</year>). <source>Depression detection based on reaction time and eye movement. 2019 IEEE International Conference on Image Processing (ICIP)</source>. <publisher-loc>Taipei</publisher-loc>.</citation></ref>
<ref id="ref23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Qian</surname> <given-names>H.</given-names></name> <name><surname>Lei</surname> <given-names>Z.</given-names></name> <name><surname>Yuqian</surname> <given-names>Q.</given-names></name></person-group> (<year>2019</year>). <article-title>A study on depression patients' eye tracking of attention bias on emotional pictures</article-title>. <source>Heihe Xueyuan Xuebao</source> <volume>10</volume>, <fpage>207</fpage>&#x2013;<lpage>209</lpage>.</citation></ref>
<ref id="ref24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rushia</surname> <given-names>S. N.</given-names></name> <name><surname>Shehab</surname> <given-names>A.</given-names></name> <name><surname>Motter</surname> <given-names>J. N.</given-names></name> <name><surname>Egglefield</surname> <given-names>D. A.</given-names></name> <name><surname>Schiff</surname> <given-names>S.</given-names></name> <name><surname>Sneed</surname> <given-names>J. R.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>Vascular depression for radiology: a review of the construct, methodology, and diagnosis</article-title>. <source>World J. Radiol.</source> <volume>12</volume>, <fpage>48</fpage>&#x2013;<lpage>67</lpage>. doi: <pub-id pub-id-type="doi">10.4329/wjr.v12.i5.48</pub-id>, PMID: <pub-id pub-id-type="pmid">32549954</pub-id></citation></ref>
<ref id="ref25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sf&#x00E4;rlea</surname> <given-names>A.</given-names></name> <name><surname>Greimel</surname> <given-names>E.</given-names></name> <name><surname>Platt</surname> <given-names>B.</given-names></name> <name><surname>Dieler</surname> <given-names>A. C.</given-names></name> <name><surname>Schulte-K&#x00F6;rne</surname> <given-names>G.</given-names></name></person-group> (<year>2018</year>). <article-title>Recognition of emotional facial expressions in adolescents with anorexia nervosa and adolescents with major depression</article-title>. <source>Psychiatry Res.</source> <volume>262</volume>, <fpage>586</fpage>&#x2013;<lpage>594</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.psychres.2017.09.048</pub-id>, PMID: <pub-id pub-id-type="pmid">28965808</pub-id></citation></ref>
<ref id="ref26"><citation citation-type="confproc"><person-group person-group-type="author"><name><surname>Shen</surname> <given-names>R.</given-names></name> <name><surname>Zhan</surname> <given-names>Q.</given-names></name> <name><surname>Wang</surname> <given-names>Y.</given-names></name> <name><surname>Ma</surname> <given-names>H.</given-names></name></person-group> (<year>2021</year>). <article-title>Depression detection by analysing eye movements on emotional images</article-title>. <conf-name>Paper Presented at the ICASSP 2021&#x2013;2021 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)</conf-name>, <publisher-loc>Toronto, ON</publisher-loc>.</citation></ref>
<ref id="ref27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Suslow</surname> <given-names>T.</given-names></name> <name><surname>Hoepfel</surname> <given-names>D.</given-names></name> <name><surname>Kersting</surname> <given-names>A.</given-names></name> <name><surname>Bodenschatz</surname> <given-names>C. M.</given-names></name></person-group> (<year>2024</year>). <article-title>Depressive symptoms and visual attention to others&#x2019; eyes in healthy individuals</article-title>. <source>BMC Psychiatry</source> <volume>24</volume>:<fpage>184</fpage>. doi: <pub-id pub-id-type="doi">10.1186/s12888-024-05633-2</pub-id>, PMID: <pub-id pub-id-type="pmid">38448877</pub-id></citation></ref>
<ref id="ref28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Thoduparambil</surname> <given-names>P. P.</given-names></name> <name><surname>Dominic</surname> <given-names>A.</given-names></name> <name><surname>Varghese</surname> <given-names>S. M.</given-names></name></person-group> (<year>2020</year>). <article-title>EEG-based deep learning model for the automatic detection of clinical depression</article-title>. <source>Phys. Eng. Sci. Med.</source> <volume>43</volume>, <fpage>1349</fpage>&#x2013;<lpage>1360</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s13246-020-00938-4</pub-id>, PMID: <pub-id pub-id-type="pmid">33090373</pub-id></citation></ref>
<ref id="ref29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wagner</surname> <given-names>S.</given-names></name> <name><surname>Doering</surname> <given-names>B.</given-names></name> <name><surname>Helmreich</surname> <given-names>I.</given-names></name> <name><surname>Lieb</surname> <given-names>K.</given-names></name> <name><surname>Tadic</surname> <given-names>A.</given-names></name></person-group> (<year>2012</year>). <article-title>A meta-analysis of executive dysfunctions in unipolar major depressive disorder without psychotic symptoms and their changes during antidepressant treatment</article-title>. <source>Acta Psychiatr. Scand.</source> <volume>125</volume>, <fpage>281</fpage>&#x2013;<lpage>292</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1600-0447.2011.01762.x</pub-id>, PMID: <pub-id pub-id-type="pmid">22007857</pub-id></citation></ref>
<ref id="ref30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>L.</given-names></name> <name><surname>Qian</surname> <given-names>E.</given-names></name> <name><surname>Zhang</surname> <given-names>Q.</given-names></name> <name><surname>Pan</surname> <given-names>F.</given-names></name></person-group> (<year>2011</year>). <article-title>Eyes clue effect in facial expression recognition</article-title>. <source>J. Educ. Sci. Hun. Norm. Univ.</source> <volume>10</volume>, <fpage>115</fpage>&#x2013;<lpage>119</lpage>. doi: <pub-id pub-id-type="doi">10.3969/j.issn.1671-6124.2011.06.029</pub-id></citation></ref>
<ref id="ref31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>Q.</given-names></name> <name><surname>Yang</surname> <given-names>H.</given-names></name> <name><surname>Yu</surname> <given-names>Y.</given-names></name></person-group> (<year>2018</year>). <article-title>Facial expression video analysis for depression detection in Chinese patients</article-title>. <source>J. Vis. Commun. Image Represent.</source> <volume>57</volume>, <fpage>228</fpage>&#x2013;<lpage>233</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jvcir.2018.11.003</pub-id></citation></ref>
<ref id="ref32"><citation citation-type="other"><person-group person-group-type="author"><collab id="coll1">World Health Organization</collab></person-group>. (<year>2024</year>). <source>COVID-19 pandemic triggers 25% increase in prevalence of anxiety and depression worldwide</source>.</citation></ref>
<ref id="ref33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Xing</surname> <given-names>Y.</given-names></name> <name><surname>Rao</surname> <given-names>N.</given-names></name> <name><surname>Miao</surname> <given-names>M.</given-names></name> <name><surname>Li</surname> <given-names>Q.</given-names></name> <name><surname>Li</surname> <given-names>Q.</given-names></name> <name><surname>Chen</surname> <given-names>X.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Task-state heart rate variability parameter-based depression detection model and effect of therapy on the parameters</article-title>. <source>IEEE Access</source> <volume>7</volume>, <fpage>105701</fpage>&#x2013;<lpage>105709</lpage>. doi: <pub-id pub-id-type="doi">10.1109/ACCESS.2019.2932393</pub-id></citation></ref>
<ref id="ref34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Xue</surname> <given-names>S.</given-names></name> <name><surname>Yantao</surname> <given-names>R.</given-names></name></person-group> (<year>2007</year>). <article-title>Online processing of facial expression recognition</article-title>. <source>Acta Psychol. Sin.</source> <volume>39</volume>, <fpage>64</fpage>&#x2013;<lpage>70</lpage>.</citation></ref>
<ref id="ref9001"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Xu</surname> <given-names>G.</given-names></name> <name><surname>Yu-Xia</surname> <given-names>H.</given-names></name> <name><surname>Yan</surname> <given-names>W.</given-names></name> <name><surname>Yue-Jia</surname> <given-names>L.</given-names></name></person-group> (<year>2011</year>). <article-title>Revision of the Chinese Facial Affective Picture System</article-title>. <source>Chinese Mental Health Journal</source>, <volume>25</volume>, <fpage>40</fpage>&#x2013;<lpage>46</lpage>.</citation></ref>
<ref id="ref35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yan</surname> <given-names>D. F.</given-names></name> <name><surname>Li</surname> <given-names>L. J.</given-names></name></person-group> (<year>2018</year>). <article-title>Cognitive dysfunction in patients with major depressive disorder and its clinical implications (in Chinese)</article-title>. <source>Chin. J. Psychiatry</source> <volume>51</volume>, <fpage>343</fpage>&#x2013;<lpage>346</lpage>. doi: <pub-id pub-id-type="doi">10.3760/cma.j.issn.1006-7884.2018.05.012</pub-id></citation></ref>
</ref-list>
</back>
</article>