<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article article-type="research-article" dtd-version="1.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Robot. AI</journal-id>
<journal-title-group>
<journal-title>Frontiers in Robotics and AI</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Robot. AI</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">2296-9144</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1782839</article-id>
<article-id pub-id-type="doi">10.3389/frobt.2026.1782839</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Original Research</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Effects of praise from a social robot on task persistence in 18- to 24-month-old children</article-title>
<alt-title alt-title-type="left-running-head">Ishibashi et al.</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2026.1782839">10.3389/frobt.2026.1782839</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Ishibashi</surname>
<given-names>Mikako</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="https://loop.frontiersin.org/people/402587"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal Analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing - original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Shinya</surname>
<given-names>Yuta</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/452311"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal Analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing - original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Yoshikawa</surname>
<given-names>Yuichiro</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Ishiguro</surname>
<given-names>Hiroshi</given-names>
</name>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Itakura</surname>
<given-names>Shoji</given-names>
</name>
<xref ref-type="aff" rid="aff5">
<sup>5</sup>
</xref>
<xref ref-type="aff" rid="aff6">
<sup>6</sup>
</xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
</contrib-group>
<aff id="aff1">
<label>1</label>
<institution>Department of Psychology and Humanities, College of Sociology, Edogawa University</institution>, <city>Chiba</city>, <country country="JP">Japan</country>
</aff>
<aff id="aff2">
<label>2</label>
<institution>Graduate School of Education, The University of Tokyo</institution>, <city>Tokyo</city>, <country country="JP">Japan</country>
</aff>
<aff id="aff3">
<label>3</label>
<institution>Graduate School of Engineering Science, Osaka University</institution>, <city>Osaka</city>, <country country="JP">Japan</country>
</aff>
<aff id="aff4">
<label>4</label>
<institution>Department of Systems Innovation, Osaka University</institution>, <city>Osaka</city>, <country country="JP">Japan</country>
</aff>
<aff id="aff5">
<label>5</label>
<institution>Center for Baby Science, Doshisha University</institution>, <city>Kyoto</city>, <country country="JP">Japan</country>
</aff>
<aff id="aff6">
<label>6</label>
<institution>Research Organization of Open Innovation and Collaboration, Ritsumeikan University</institution>, <city>Osaka</city>, <country country="JP">Japan</country>
</aff>
<author-notes>
<corresp id="c001">
<label>&#x2a;</label>Correspondence: Mikako Ishibashi, <email xlink:href="mailto:m.ishibashi.k@gmail.com">m.ishibashi.k@gmail.com</email>
</corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-27">
<day>27</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2026</year>
</pub-date>
<volume>13</volume>
<elocation-id>1782839</elocation-id>
<history>
<date date-type="received">
<day>07</day>
<month>01</month>
<year>2026</year>
</date>
<date date-type="rev-recd">
<day>06</day>
<month>02</month>
<year>2026</year>
</date>
<date date-type="accepted">
<day>11</day>
<month>02</month>
<year>2026</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2026 Ishibashi, Shinya, Yoshikawa, Ishiguro and Itakura.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Ishibashi, Shinya, Yoshikawa, Ishiguro and Itakura</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-27">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<sec>
<title>Introduction</title>
<p>Social robots are increasingly being integrated into children&#x2019;s daily lives, shaping their social interactions and learning behaviors. However, no study has empirically investigated the effect of robot-administered praise in children younger than 4 years old.</p>
</sec>
<sec>
<title>Method</title>
<p>This study focuses on the social robot CommU, a simple, approximately 30 cm tall, child-shaped robot that exerts less social pressure and helps children attend to social cues more easily. We examined whether praise from CommU is associated with task persistence in children aged 18&#x2013;24 months, in comparison with human praise.</p>
</sec>
<sec>
<title>Result</title>
<p>Children showed greater task persistence in the Praise condition than in the No Praise condition, regardless of agent type (CommU vs. Human). In addition, children&#x2019;s task persistence was positively associated with the amount of time they spent looking at the agent.</p>
</sec>
<sec>
<title>Discussion</title>
<p>These findings suggest that praise delivered by a social robot is associated with greater task persistence in children aged 18&#x2013;24 months. Additionally, the positive association between task persistence and time spent looking at the agent suggests that children&#x2019;s social attention may contribute to sustained engagement during the task. More broadly, the results point to the possibility that social robots may be relevant to aspects of early childhood engagement, beyond the specific task-persistence behavior examined in this study.</p>
</sec>
</abstract>
<kwd-group>
<kwd>CommU robot</kwd>
<kwd>persistence</kwd>
<kwd>praise effect</kwd>
<kwd>look</kwd>
<kwd>toddlers (18&#x2013;24 months)</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. This study was supported by the MEXT &#x201c;Innovation Platform for Society 5.0&#x201d; (Grant number: JPMXP0518071489).</funding-statement>
</funding-group>
<counts>
<fig-count count="3"/>
<table-count count="3"/>
<equation-count count="0"/>
<ref-count count="45"/>
<page-count count="00"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Human-Robot Interaction</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="s1">
<label>1</label>
<title>Introduction</title>
<p>Social robots, defined as robots that can interact socially and communicate with humans and other autonomous physical entities (<xref ref-type="bibr" rid="B12">Fong et al., 2003</xref>), are being introduced into children&#x2019;s daily lives (<xref ref-type="bibr" rid="B13">Fridin, 2014</xref>; <xref ref-type="bibr" rid="B44">Westlund et al., 2018</xref>; <xref ref-type="bibr" rid="B45">Yu and Roque, 2019</xref>; <xref ref-type="bibr" rid="B37">Shiomi, 2024</xref>), and their presence in homes and classrooms is expected to increase rapidly (<xref ref-type="bibr" rid="B6">Belpaeme et al., 2018</xref>; <xref ref-type="bibr" rid="B15">Johal, 2020</xref>; <xref ref-type="bibr" rid="B43">van Straten et al., 2020</xref>). The various types of social robots include androids, dolls, and animal-like designs, each capable of eliciting beneficial social behaviors from children (<xref ref-type="bibr" rid="B30">Papadopoulos et al., 2020</xref>; <xref ref-type="bibr" rid="B28">Nijssen et al., 2021</xref>; <xref ref-type="bibr" rid="B39">Sommer et al., 2021</xref>).</p>
<p>Robots are more likely to be perceived themselves as social beings when they respond appropriately to children&#x2019;s actions and words (<xref ref-type="bibr" rid="B23">Leite et al., 2013</xref>), behave like friends (<xref ref-type="bibr" rid="B17">Kanda et al., 2004</xref>), or express emotions (<xref ref-type="bibr" rid="B14">Hoffman and Ju, 2014</xref>). <xref ref-type="bibr" rid="B22">Kumazaki et al. (2020)</xref> suggested that when social robots are introduced to young children, small-sized robots with simple designs are preferred. CommU (Vstone Co., Ltd.) is a relatively simple robot that lacks a human-like appearance but has a child-like form, and is approximately 30 cm tall (<xref ref-type="bibr" rid="B42">Uchida et al., 2020</xref>). Considering its simple design, the CommU conveys less information than Android robots and is less likely to create social pressure, allowing users to focus more on social cues (<xref ref-type="bibr" rid="B19">Kumazaki et al., 2018a</xref>).</p>
<p>Several studies have demonstrated the effectiveness of CommUs in promoting social communication. For instance, conversations with CommUs have facilitated self-disclosure among users (<xref ref-type="bibr" rid="B42">Uchida et al., 2020</xref>). Individuals with autism-spectrum disorder (ASD) felt more comfortable communicating with the CommU than with humans (<xref ref-type="bibr" rid="B20">Kumazaki et al., 2018b</xref>), suggesting that the CommU may be useful in fostering relationships and providing psychological support. CommU has been applied in learning, where training via the CommU improved joint attention (JA) in children, leading to enhanced performance in subsequent JA tasks with humans (<xref ref-type="bibr" rid="B21">Kumazaki et al., 2019</xref>). Thus, the CommU has proven to be an effective tool for supporting children&#x2019;s learning.</p>
<p>One specific type of support that social robots can provide for children&#x2019;s social learning is praise. <xref ref-type="bibr" rid="B38">Shiomi et al. (2021)</xref> examined the effects of praise received from two robots on children&#x2019;s learning. Praise is a fundamental social reward that motivates young children to achieve their goals and fosters their desire to learn (<xref ref-type="bibr" rid="B25">Leonard et al., 2021</xref>). <xref ref-type="bibr" rid="B38">Shiomi et al. (2021)</xref> compared the duration of English learning sessions among children aged 4&#x2013;6 years under two conditions: one versus two robots praising the children. When two robots praised the children, they spent significantly more time learning English than when only one robot praised them, suggesting that praise from robots encourages persistent learning among young children.</p>
<p>Similarly, other studies have examined the effects of robot-administered praise (<xref ref-type="bibr" rid="B2">Akalin et al., 2019</xref>; <xref ref-type="bibr" rid="B8">Davison et al., 2021</xref>; <xref ref-type="bibr" rid="B11">Fasola and Matari&#x107;, 2010</xref>). In these studies, praise was delivered by physically embodied social robots that interacted with users through speech and simple social behaviors such as gaze and gesture. The robots provided encouraging or positive feedback during task performance, often emphasizing effort, engagement, or successful task completion in order to enhance motivation and learning-related outcomes. However, all these studies focused on school-aged or older children (or adults), and little is known about how robot-delivered praise functions in children under 4 years old. Collecting sufficient research data on robots interacting with children under 4 years old is challenging, because of their strong resistance to robots (<xref ref-type="bibr" rid="B20">Kumazaki et al., 2018b</xref>). Consequently, with the exception of <xref ref-type="bibr" rid="B40">Tanaka et al. (2007)</xref>, few studies have explored the interactions between robots and developing children aged 1 or 2 years. <xref ref-type="bibr" rid="B40">Tanaka et al. (2007)</xref> placed a robot in a nursery room with children aged 18&#x2013;24 months over a five-month period. The robot (QRIO) was relatively small (58 cm tall) and its facial structure was similar to that of the CommU, with cameras mounted above both eyes. The robot which was programmed to provide contingent responses to the children over 45 sessions, improved child&#x2013;robot interaction quality over time, including caretaking behavior and conversation. These findings suggest that even children as young as 18&#x2013;24 months old can establish social interactions with robots. 
However, to the best of our knowledge, no study has empirically investigated the effects of robot-administered praise on toddlers (18&#x2013;24 months old).</p>
<p>The effect of praise on children aged 1&#x2013;2 years was further examined from the perspective of task persistence. <xref ref-type="bibr" rid="B26">Lucca et al. (2019)</xref> conducted an experiment to investigate the impact of parental encouragement on children aged 18&#x2013;24 months. The results showed that children who received process-focused praise from their parents spent more time working on tasks than those who did not (<xref ref-type="bibr" rid="B26">Lucca et al., 2019</xref>). <xref ref-type="bibr" rid="B33">Radovanovic et al. (2023)</xref> also found that the persistence of children (17&#x2013;31 months old) was maximized when parental praise was timed to coincide with their engagement in the task. While previous work highlighted the importance of temporally contingent praise, the present study examined whether repeated, periodic process-focused praise delivered by a physically embodied social robot could support children&#x2019;s task persistence even without moment-to-moment contingency. In the present study, the robot delivered brief verbal praise statements at fixed intervals during the task, accompanied by simple social cues such as facing the child and maintaining attentive postures. Extending this line of research, we examined whether this type of process-oriented praise from a social robot would be associated with greater persistence in young children, as has been observed in interactions with human caregivers.</p>
<p>With regard to early expressions of persistence, children are widely influenced by interactions with the social environment (<xref ref-type="bibr" rid="B9">Deater-Deckard et al., 2006</xref>; <xref ref-type="bibr" rid="B27">Mokrova et al., 2013</xref>). Recent research has shown that infants&#x2019; behavioral and attentional persistence may be enhanced when they observe persistence in others (<xref ref-type="bibr" rid="B24">Leonard et al., 2017</xref>; <xref ref-type="bibr" rid="B36">Shinya and Ishibashi, 2022</xref>). Therefore, the effect of praise on children&#x2019;s persistence may depend on their awareness of others as social beings. For example, <xref ref-type="bibr" rid="B29">Okumura et al. (2023)</xref> found that five-year-olds observed by a socially interactive robot shared more stickers than those interacting with an attentional but non-interactive or stationary robot, suggesting that five-year-olds adjust their behavior based on whether the other entity is a social being. However, whether the social awareness of a robot&#x2019;s presence enhances young children&#x2019;s persistence remains unclear. In the present study, persistence refers specifically to young children&#x2019;s continued behavioral engagement with a task in the face of difficulty or lack of immediate success, operationalized as the duration and frequency of their attempts to interact with the task materials.</p>
<p>In this study, we experimentally investigated whether praise from the social robot CommU would be associated with greater persistence in young children aged 18&#x2013;24 months in the same manner as human praise does. Considering the suggestion that young children prefer small-sized robots (<xref ref-type="bibr" rid="B22">Kumazaki et al., 2020</xref>), we found CommU appropriate for children aged 18&#x2013;24 months. <xref ref-type="bibr" rid="B20">Kumazaki et al. (2018b)</xref> pointed out that CommU has a high degree of eye-movement flexibility, indicating that it can easily capture the attention of children. Given that parental encouragement has been shown to enhance task persistence in children aged 18&#x2013;24 months (<xref ref-type="bibr" rid="B26">Lucca et al., 2019</xref>) and that young children perceive robots as social beings (<xref ref-type="bibr" rid="B40">Tanaka et al., 2007</xref>), we hypothesized that praise from CommU would be associated with children&#x2019;s task persistence in a manner similar to human praise. Furthermore, we investigated whether children&#x2019;s looking behavior toward CommU would be associated with task persistence, based on the expectation that attention to a social agent may be related to children&#x2019;s engagement and behavioral adjustment (<xref ref-type="bibr" rid="B29">Okumura et al., 2023</xref>).</p>
</sec>
<sec sec-type="methods" id="s2">
<label>2</label>
<title>Methods</title>
<sec id="s2-1">
<label>2.1</label>
<title>Participants</title>
<p>Following a previous study (<xref ref-type="bibr" rid="B26">Lucca et al., 2019</xref>), we recruited children aged 18&#x2013;24 months to participate in this study. In total, 80 children were initially recruited and scheduled for participation. Of these 80 children, 16 were excluded prior to or during the experimental session for the following reasons: (1) fear of the CommU robot that prevented task engagement (<italic>n</italic> &#x3d; 2); (2) lack of motivation or persistent clinging to their mother for more than 2 min (<italic>n</italic> &#x3d; 7); and (3) experimental or procedural errors (<italic>n</italic> &#x3d; 7). Based on an <italic>a priori</italic> sample size estimation, the target sample size was 32 participants per condition. Recruitment was therefore terminated once this target was reached.</p>
<p>Subsequently, during video coding, an additional 8 children were excluded because they did not engage in the entire persistence task and thus did not provide analyzable data. As a result, the final sample used for analysis consisted of 58 children (30 girls; mean age &#x3d; 20.79 months, <italic>SD</italic> &#x3d; 2.05). Of these, 30 children were assigned to the CommU condition (mean age &#x3d; 20.87 months, <italic>SD</italic> &#x3d; 2.36), and 28 children were assigned to the Human condition (mean age &#x3d; 20.71 months, <italic>SD</italic> &#x3d; 1.70). A summary of participant recruitment, exclusions, and final sample characteristics is provided in <xref ref-type="table" rid="T1">Table 1</xref>.</p>
<table-wrap id="T1" position="float">
<label>TABLE 1</label>
<caption>
<p>Participant flow and demographic characteristics.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Characteristic</th>
<th align="center">Total sample</th>
<th align="center">CommU condition</th>
<th align="center">Human condition</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">Initially recruited</td>
<td align="center">80</td>
<td align="center">&#x2014;</td>
<td align="center">&#x2014;</td>
</tr>
<tr>
<td align="center">Excluded before/during session</td>
<td align="center">16 (2 fear, 7 low motivation/clinging, 7 procedural errors)</td>
<td align="center">&#x2014;</td>
<td align="center">&#x2014;</td>
</tr>
<tr>
<td align="center">Target sample size per condition</td>
<td align="center">64</td>
<td align="center">32</td>
<td align="center">32</td>
</tr>
<tr>
<td align="center">Excluded during coding</td>
<td align="center">8</td>
<td align="center">&#x2014;</td>
<td align="center">&#x2014;</td>
</tr>
<tr>
<td align="center">Final analyzed sample (n)</td>
<td align="center">58</td>
<td align="center">30</td>
<td align="center">28</td>
</tr>
<tr>
<td align="center">Girls (n)</td>
<td align="center">30</td>
<td align="center">&#x2014;</td>
<td align="center">&#x2014;</td>
</tr>
<tr>
<td align="center">Mean age (months) (SD)</td>
<td align="center">20.79 (2.05)</td>
<td align="center">20.87 (2.36)</td>
<td align="center">20.71 (1.70)</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>Based on an <italic>a priori</italic> sample size estimation, the target sample size was 32 participants per condition, and recruitment was terminated once this number was reached. During video coding, an additional eight children were excluded.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<p>This study was approved by the Ethics Committee of Doshisha University (number: 20029).</p>
</sec>
<sec id="s2-2">
<label>2.2</label>
<title>Stimuli</title>
<p>A toy with stacked gears employed in a previous study (<xref ref-type="bibr" rid="B26">Lucca et al., 2019</xref>) was used as a stimulus. This toy consisted of a bar with a base and six disks that could be stacked on top of the bar. To prevent stacking, either a sponge was glued to the center of the disk or a rubber band was glued to the disk (<xref ref-type="fig" rid="F1">Figure 1</xref>).</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption>
<p>Stacking toy used in the task. Disks were modified with either a sponge (left) or a rubber band (right) to prevent successful stacking.</p>
</caption>
<graphic xlink:href="frobt-13-1782839-g001.tif">
<alt-text content-type="machine-generated">Stacking toy used in the task. On the left, colorful gear-shaped disks with sponge are scattered around a white base. On the right, disks bound with yellow bands next to the base. The disks were modified with either the sponge (left) or the rubber band (right) to prevent successful stacking.</alt-text>
</graphic>
</fig>
</sec>
<sec id="s2-3">
<label>2.3</label>
<title>Procedure</title>
<sec id="s2-3-1">
<label>2.3.1</label>
<title>Experimental setup</title>
<p>In the CommU condition, the child, mother, experimental assistant, and CommU were in an experimental room. Before conducting the task, the children were allowed sufficient time (approximately 10&#x2013;20 min) to interact with CommU to relax. None of the children who participated in the experiment had previously interacted with CommU. The experimental assistant provided support to facilitate the interaction among CommU, mother, and child. After confirming that the child was familiar with CommU (e.g., talking to, laughter, or touching CommU), the experimental assistant prepared the tools used for the persistent task. Under the Human condition, the mother and child were escorted into the experimental room by an experimental assistant.</p>
</sec>
</sec>
<sec id="s2-4">
<label>2.4</label>
<title>Persistent task</title>
<p>Two conditions were set, whereby the agent either praised or did not praise the child. The with/without praise condition was a within-participant design, and the order of the conditions was randomized between participants. The agent factor (CommU/Human) was a between-participants design.</p>
<sec id="s2-4-1">
<label>2.4.1</label>
<title>Task demonstration</title>
<p>In a typical task, the experimenter first said, &#x201c;x-chan [child&#x2019;s name], look&#x201d; to draw the child&#x2019;s attention, and then demonstrated how to remove the sponge from the disk and insert the disk into the bar. The experimenter then handed another disk to the child and said, &#x201c;Here you go, [the child&#x2019;s name].&#x201d; After the child received the disk, the child&#x2019;s behavior was observed for 2 min. To play with the toy, the child needed to insert the disk into the center of the stick; however, a disk glued to sponges or rubber bands cannot be inserted. The sponge and rubber bands were presented in random order. This task was designed to assess task persistence, defined in this study as the child&#x2019;s continued engagement with a goal-directed activity despite encountering difficulty or lack of immediate success. Because the disks could not be successfully inserted due to the attached sponge or rubber bands, children experienced repeated obstacles during the activity. The 2-min observation period, consistent with the procedure used by <xref ref-type="bibr" rid="B26">Lucca et al. (2019)</xref>, allowed us to measure how long children remained engaged in the task under these challenging conditions, which was operationalized through their trying behavior.</p>
</sec>
</sec>
<sec id="s2-5">
<label>2.5</label>
<title>CommU condition</title>
<sec id="s2-5-1">
<label>2.5.1</label>
<title>Praise condition</title>
<p>The CommU praised the child every 10 s after the disk was handed to him/her. The timing of the praise was fixed at every 10 s for 2 min. Praise was given in the following order: &#x201c;[child&#x2019;s name], you are doing well&#x201d;; &#x201c;[child&#x2019;s name], keep it up&#x201d;; and &#x201c;[child&#x2019;s name], you&#x2019;re working hard.&#x201d; These utterances constituted process-focused praise, as they emphasized the child&#x2019;s ongoing effort and engagement rather than success, ability, or task completion. The phrases were selected so that they would remain appropriate regardless of whether the child was able to successfully complete the task. In a study by <xref ref-type="bibr" rid="B38">Shiomi et al. (2021)</xref>, a human operator was incorporated into the system to control the timing of the robot&#x2019;s actions because children behaved unexpectedly. As in the study by <xref ref-type="bibr" rid="B38">Shiomi et al. (2021)</xref>, the timing of the praise was fixed (once every 10 s). We selected sentences that were not unnatural even when the child was attempting to perform an unattainable task. The operator remotely controlled the CommU and moved the CommU&#x2019;s face and body appropriately in response to the child&#x2019;s movements, allowing the praise to be presented as part of a socially responsive interaction. After 2 min, the experimenter again drew the child&#x2019;s attention and handed the child a disk that could be inserted into the stick to reduce frustration.</p>
</sec>
<sec id="s2-5-2">
<label>2.5.2</label>
<title>No Praise condition</title>
<p>The disks and stick with base used in the Praise condition were removed, and new disks and stick with base were placed on the floor. The disks had a rubber band attached to them, and the experimenter demonstrated the removal of the rubber band attached to the disk. Measurements began after the child was provided with a disk. Similar to the Praise condition, the face and body moved in accordance with the child&#x2019;s movements.</p>
</sec>
</sec>
<sec id="s2-6">
<label>2.6</label>
<title>Human condition</title>
<sec id="s2-6-1">
<label>2.6.1</label>
<title>Praise condition</title>
<p>A human praised the child every 10 s after the disk was handed to him/her. The lines and order of praise were the same as in the CommU condition. After 2 min, the experimenter again drew the child&#x2019;s attention and handed the child a disk that could be inserted into the stick to reduce frustration.</p>
</sec>
<sec id="s2-6-2">
<label>2.6.2</label>
<title>No Praise condition</title>
<p>The disks and stick used in the Praise condition were removed and new disks and stick with base were placed. The disks had a rubber band attached to them, and the experimenter demonstrated the removal of the rubber band attached to the disks. Measurements began after the disks were handed over to the child. As in the Praise condition, the face and body moved in accordance with the child&#x2019;s movements.</p>
</sec>
</sec>
<sec id="s2-7">
<label>2.7</label>
<title>Coding schema</title>
<sec id="s2-7-1">
<label>2.7.1</label>
<title>Trying</title>
<p>Children&#x2019;s persistence was operationalized as &#x201c;Trying,&#x201d; which was measured as described by <xref ref-type="bibr" rid="B26">Lucca et al. (2019)</xref> for the following two behaviors:<list list-type="order">
<list-item>
<p>Inserting disk into the stick: Trying was coded when the disk was placed at the tip of the stick. The start time was when the child placed the disk at the tip of the stick, and the end time was when the disk left the tip of the stick.</p>
</list-item>
<list-item>
<p>Removal of the sponge/rubber band from the disks: The time at which each child attempted to remove the sponge or rubber band glued to a disk was recorded. The start time was when the child put a finger on the sponge or rubber band in an attempt to remove it, and the end time was when the child&#x2019;s finger left the disk.</p>
</list-item>
</list>
</p>
</sec>
<sec id="s2-7-2">
<label>2.7.2</label>
<title>Look</title>
<p>We measured the duration of the children&#x2019;s looking time in the CommU/Human condition. Look commenced when the child looked at the CommU (CommU condition) or a human (Human condition) for at least 1 s.</p>
</sec>
</sec>
</sec>
<sec sec-type="results" id="s3">
<label>3</label>
<title>Results</title>
<p>Descriptive statistics are shown for each Agent in the Praise and No Praise conditions (<xref ref-type="table" rid="T2">Table 2</xref>), along with the mean time spent Trying between the No Praise and Praise conditions for each Agent (<xref ref-type="fig" rid="F2">Figure 2</xref>).</p>
<table-wrap id="T2" position="float">
<label>TABLE 2</label>
<caption>
<p>Mean and standard deviation of Trying and Look with Praise and No Praise condition per Agent.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="left">Agent</th>
<th align="left">Condition</th>
<th align="left">Mean of Trying (<italic>SD</italic>)</th>
<th align="left">Mean of Look (<italic>SD</italic>)</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="left">CommU</td>
<td align="left">No praise</td>
<td align="left">26.3 (27.31)</td>
<td align="left">12.01 (11.97)</td>
</tr>
<tr>
<td align="left">CommU</td>
<td align="left">Praise</td>
<td align="left">32.22 (27.48)</td>
<td align="left">11.86 (10.1)</td>
</tr>
<tr>
<td align="left">Human</td>
<td align="left">No praise</td>
<td align="left">21.04 (26.42)</td>
<td align="left">7.84 (7.35)</td>
</tr>
<tr>
<td align="left">Human</td>
<td align="left">Praise</td>
<td align="left">24.91 (23.28)</td>
<td align="left">8.00 (8.7)</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption>
<p>Mean time (s) of Trying for the No Praise and Praise condition per Agent.</p>
</caption>
<graphic xlink:href="frobt-13-1782839-g002.tif">
<alt-text content-type="machine-generated">Paired box plots with individual participant data display mean trying times in seconds under No Praise and Praise conditions for CommU (left) and Human (right). Individual lines show variability in responses.</alt-text>
</graphic>
</fig>
<p>We measured Look during the task and examined the relationship between the length of Look and Trying using Pearson&#x2019;s correlation analysis. In the CommU condition, Pearson&#x2019;s correlation analysis revealed no significant correlation between Trying and Look during the Praise (<italic>r</italic> &#x3d; 0.10, <italic>p</italic> &#x3d; 0.59) and No Praise (<italic>r</italic> &#x3d; 0.35, <italic>p</italic> &#x3d; 0.06) conditions. In the Human condition, the correlation analysis revealed a significant correlation between Trying and Look in the Praise (<italic>r</italic> &#x3d; 0.58, <italic>p</italic> &#x3d; 0.001) and No Praise (<italic>r</italic> &#x3d; 0.71, <italic>p</italic> &#x3d; 0.001) conditions (<xref ref-type="fig" rid="F3">Figure 3</xref>).</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption>
<p>Pearson&#x2019;s correlation between Trying and Look in the CommU (left) and Human (right) conditions. Orange &#x3d; No Praise condition, Blue &#x3d; Praise condition.</p>
</caption>
<graphic xlink:href="frobt-13-1782839-g003.tif">
<alt-text content-type="machine-generated">Scatterplot with two panels compares the relationship between time spent looking and trying behaviors under CommU and Human conditions. Each panel shows orange dots and lines for No Praise and blue dots and lines for Praise. Axes are labeled Look (seconds) on x-axis and Trying (seconds) on y-axis.</alt-text>
</graphic>
</fig>
<sec id="s3-1">
<label>3.1</label>
<title>Analysis</title>
<p>Linear mixed-effects models (LMMs) were employed using the lmer function from the lme4 package in R (<xref ref-type="bibr" rid="B4">Bates et al., 2015</xref>) to examine the effects of age, sex, Condition (Praise/No Praise), Look, and Agent (CommU/Human) on the Trying. In these models, fixed effects represent average effects of the predictors across participants, that is, the general tendencies observed after accounting for individual differences, while random intercepts for participants account for individual differences in baseline Trying. Continuous predictors (Age and Look) were standardized (z-scored) prior to analysis. For each fixed effect, <italic>B</italic> represents the estimated regression coefficient, indicating the expected change in the outcome associated with a one-unit increase in the predictor, or the difference from the reference category for categorical predictors. <italic>SE</italic> denotes the standard error of the coefficient. The <italic>t</italic> value represents a Wald statistic calculated as the ratio of the coefficient to its standard error (<italic>B/SE</italic>), which reflects the strength of evidence for the effect.</p>
<p>We included the participants as random intercepts to account for individual differences. Age and Look variables were standardized using z-scores. Model 1 was used as a baseline model to assess the effects of standardized Age and Sex on Trying. A random intercept for participants (ID) was included as an individual difference. Model 2 was an extension of Model 1 with Condition (Praise/No Praise), standardized Look, and Agent (CommU/Human) added as fixed effects. Model 3 further included the interaction terms to examine whether the effects of Condition and Look on trying behavior differed by Agent. Each model is described in detail in the Appendix.</p>
<p>
<xref ref-type="table" rid="T3">Table 3</xref> presents the LMM results. In Model 1, standardized Age is positively associated with Trying (<italic>B</italic> &#x3d; 8.06, <italic>SE</italic> &#x3d; 3.11, <italic>t</italic> &#x3d; 2.59, <italic>p</italic> &#x3d; 0.01), indicating that older children tended to engage in more trying behavior. Sex does not show a significant effect (<italic>B</italic> &#x3d; &#x2212;7.18, <italic>SE</italic> &#x3d; 6.22, <italic>t</italic> &#x3d; &#x2212;1.16, <italic>p</italic> &#x3d; 0.25).</p>
<table-wrap id="T3" position="float">
<label>TABLE 3</label>
<caption>
<p>Comparison of LMMs.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Variables</th>
<th colspan="3" align="center">Model 1</th>
<th colspan="3" align="center">Model 2</th>
<th colspan="3" align="center">Model 3</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<italic>Predictors</italic>
</td>
<td align="center">
<italic>B</italic>
</td>
<td align="center">95%CI</td>
<td align="center">
<italic>P</italic>
</td>
<td align="center">
<italic>B</italic>
</td>
<td align="center">95%CI</td>
<td align="center">
<italic>P</italic>
</td>
<td align="center">
<italic>B</italic>
</td>
<td align="center">95%CI</td>
<td align="center">
<italic>P</italic>
</td>
</tr>
<tr>
<td align="center">(Intercept)</td>
<td align="center">29.78</td>
<td align="center">21.24&#x2013;38.31</td>
<td align="center">
<bold>&#x3c;0.001</bold>
</td>
<td align="center">26.08</td>
<td align="center">16.45&#x2013;35.72</td>
<td align="center">
<bold>&#x3c;0.001</bold>
</td>
<td align="center">26.01</td>
<td align="center">16.18&#x2013;35.84</td>
<td align="center">
<bold>&#x3c;0.001</bold>
</td>
</tr>
<tr>
<td align="center">Standardized Age</td>
<td align="center">8.06</td>
<td align="center">1.90&#x2013;14.22</td>
<td align="center">
<bold>0.011</bold>
</td>
<td align="center">7.15</td>
<td align="center">1.54&#x2013;12.76</td>
<td align="center">
<bold>0.013</bold>
</td>
<td align="center">6.58</td>
<td align="center">0.99&#x2013;12.16</td>
<td align="center">
<bold>0.021</bold>
</td>
</tr>
<tr>
<td align="center">Sex [m]</td>
<td align="center">&#x2212;7.18</td>
<td align="center">&#x2212;19.46&#x2013;5.10</td>
<td align="center">0.249</td>
<td align="center">&#x2212;4.17</td>
<td align="center">&#x2212;15.45&#x2013;7.10</td>
<td align="center">0.465</td>
<td align="center">&#x2212;3.59</td>
<td align="center">&#x2212;14.74&#x2013;7.57</td>
<td align="center">0.525</td>
</tr>
<tr>
<td align="center">Condition [Praise]</td>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="center">5.09</td>
<td align="center">0.23&#x2013;9.95</td>
<td align="center">
<bold>0.040</bold>
</td>
<td align="center">6.24</td>
<td align="center">&#x2212;0.57&#x2013;13.04</td>
<td align="center">0.072</td>
</tr>
<tr>
<td align="center">Standardized Look</td>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="center">10.61</td>
<td align="center">4.80&#x2013;16.41</td>
<td align="center">
<bold>&#x3c;0.001</bold>
</td>
<td align="center">7.29</td>
<td align="center">0.18&#x2013;14.41</td>
<td align="center">
<bold>0.045</bold>
</td>
</tr>
<tr>
<td align="center">Agent [human]</td>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="center">&#x2212;0.63</td>
<td align="center">&#x2212;12.10&#x2013;10.85</td>
<td align="center">0.914</td>
<td align="center">1.16</td>
<td align="center">&#x2212;11.21&#x2013;13.52</td>
<td align="center">0.853</td>
</tr>
<tr>
<td align="center">Condition [Praise] x Agent [human]</td>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="center">&#x2212;2.37</td>
<td align="center">&#x2212;12.17&#x2013;7.42</td>
<td align="center">0.632</td>
</tr>
<tr>
<td align="center">Agent [human] x standardized Look</td>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="center">9.34</td>
<td align="center">&#x2212;2.57&#x2013;21.26</td>
<td align="center">0.123</td>
</tr>
<tr>
<td align="center">Observations</td>
<td colspan="3" align="center">116</td>
<td colspan="3" align="center">116</td>
<td colspan="3" align="center">116</td>
</tr>
<tr>
<td align="center">Marginal R<sup>2</sup>/Conditional R<sup>2</sup>
</td>
<td colspan="3" align="center">0.108/0.746</td>
<td colspan="3" align="center">0.264/0.764</td>
<td colspan="3" align="center">0.287/0.764</td>
</tr>
<tr>
<td align="center">AIC</td>
<td colspan="3" align="center">1046</td>
<td colspan="3" align="center">1034</td>
<td colspan="3" align="center">1035</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>The Bold font indicates statistical significance (p &#x003C; 0.05).</p>
</fn>
</table-wrap-foot>
</table-wrap>
<p>In Model 2, standardized Age remains significant (<italic>B</italic> &#x3d; 7.15, <italic>SE</italic> &#x3d; 2.83, <italic>t</italic> &#x3d; 2.53, <italic>p</italic> &#x3d; 0.01). In addition, Condition (Praise vs. No Praise) and Look are significant predictors of Trying (Condition: <italic>B</italic> &#x3d; 5.09, <italic>SE</italic> &#x3d; 2.45, <italic>t</italic> &#x3d; 2.08, <italic>p</italic> &#x3d; 0.04; Look: <italic>B</italic> &#x3d; 10.61, <italic>SE</italic> &#x3d; 2.93, <italic>t</italic> &#x3d; 3.62, <italic>p</italic> &#x3d; 0.001), indicating that children in the Praise condition and those who looked at the agent for longer tended to engage in more trying behavior.</p>
<p>In Model 3, standardized Age and Look remain significant predictors of Trying (Standardized Age: <italic>B</italic> &#x3d; 6.58, <italic>SE</italic> &#x3d; 2.82, <italic>t</italic> &#x3d; 2.34, <italic>p</italic> &#x3d; 0.02; Look: <italic>B</italic> &#x3d; 7.29, <italic>SE</italic> &#x3d; 3.59, <italic>t</italic> &#x3d; 2.03, <italic>p</italic> &#x3d; 0.05). The effect of Condition showed a marginal trend (<italic>B</italic> &#x3d; 6.24, <italic>SE</italic> &#x3d; 3.43, <italic>t</italic> &#x3d; 1.82, <italic>p</italic> &#x3d; 0.07). No significant interaction effect is observed between Agent and Condition (<italic>p</italic> &#x3d; 0.63) or between Agent and Look (<italic>p</italic> &#x3d; 0.13), indicating that the effects of Praise and Look did not differ between the CommU and Human. The main effect of Condition, which is significant in Model 2, becomes a marginal trend in Model 3 (<italic>p</italic> &#x3d; 0.07).</p>
<p>Model comparisons were conducted using likelihood ratio tests (LRTs), which evaluate whether a more complex model provides a significantly better fit to the data than a simpler, nested model. In the LRTs, Model 2 exhibits a significantly improved fit compared to Model 1 (&#x3c7;<sup>2</sup> (3) &#x3d; 17.79, <italic>p</italic> &#x3c; 0.001). However, no significant effect is observed between Model 2 and Model 3 (&#x3c7;<sup>2</sup> (2) &#x3d; 2.87, <italic>p</italic> &#x3d; 0.24), suggesting that the effect of interaction of Model 3 does not improve significantly. Model fit was also evaluated using the Akaike Information Criterion (AIC), a measure that balances model fit and model complexity, with lower values indicating better fit. Model 2 showed the lowest AIC (<italic>AIC</italic> &#x3d; 1034), suggesting that it provided the best overall balance between explanatory power and parsimony.</p>
</sec>
</sec>
<sec sec-type="discussion" id="s4">
<label>4</label>
<title>Discussion</title>
<p>This study aimed to examine whether children aged 18&#x2013;24 months show greater task persistence in task-related behavior when praised by the social robot CommU, similar to when praised by a human. We examined the effects of Condition (Praise/No Praise), Agent (CommU/Human), and Look (children&#x2019;s looking time at the agent), as predictors of children&#x2019;s persistence in their tasks. Using the LMM, we found that Age, Condition (Praise/No Praise), and children&#x2019;s Look toward the agent were significantly associated with task persistence. Specifically, persistence tended to increase with age, and children showed greater persistence when they received praise and when they looked at the Agent for longer periods. Importantly, model comparison based on the AIC indicated that a model including only these main effects (Model 2) provided the best balance between explanatory power and parsimony. Accordingly, adding interaction terms between Agent and Condition or between Look and Condition did not improve model fit, suggesting that the associations between Praise, Look, and task persistence were similar across Agent types and conditions.</p>
<p>Our results suggest that the praise of CommU is associated with greater task persistence in children aged 18&#x2013;24 months, and is consistent with previous research (<xref ref-type="bibr" rid="B26">Lucca et al., 2019</xref>), which found that young children who received parental praise persisted in their tasks for a longer period. This also aligns with <xref ref-type="bibr" rid="B38">Shiomi et al. (2021)</xref>, who demonstrated that children aged 5&#x2013;6 years spend more time working on a task when encouraged by social robots. Interestingly, our study found that praise from CommU was associated with greater task persistence compared to the No Praise condition. This suggests that even children as young as 18&#x2013;24 months of age may respond to social signals from CommU.</p>
<p>In addition, regardless of whether CommU provided praise, the children were drawn to the robot, which led them to look at it. <xref ref-type="bibr" rid="B29">Okumura et al. (2023)</xref> found that five-year-olds engaged in strategic reputation management by sharing more stickers when observed by a social robot than by a non-interactive robot. This suggests that the presence of a robot is relevant to children&#x2019;s attention to social cues and may be related to their social behavior.</p>
<p>Similarly, <xref ref-type="bibr" rid="B20">Kumazaki et al. (2018b)</xref> noted that CommU&#x2019;s high degree of eye-movement freedom naturally draws user attention. <xref ref-type="bibr" rid="B3">Anzalone et al. (2014)</xref> used the Nao robot, a design relatively similar to CommU, to train children with ASD and found that the JA scores in children with ASD significantly decreased when using Nao. Since Nao&#x2019;s eyes are smaller than CommU&#x2019;s, children with ASD may focus on the non-eye-related features of Nao (<xref ref-type="bibr" rid="B31">Pennisi et al., 2016</xref>). Considering these studies, CommU&#x2019;s eye features may have increased children&#x2019;s social awareness, contributing to their engagement in the task.</p>
<p>A significant Age effect was also observed, consistent with the findings of <xref ref-type="bibr" rid="B33">Radovanovic et al. (2023)</xref>, who demonstrated a linear relationship between age and engagement in a task among children aged 17&#x2013;31 months. Previous studies have suggested that as children aged 1&#x2013;2 years grow, they develop greater effortful control (<xref ref-type="bibr" rid="B35">Rothbart et al., 2000</xref>; <xref ref-type="bibr" rid="B34">Rothbart and Bates, 2007</xref>; <xref ref-type="bibr" rid="B32">Putnam et al., 2024</xref>). Hence, task persistence can be assumed to be related to the age of children in this age group.</p>
<p>Previous studies have suggested that young children prefer small and simply designed robots (<xref ref-type="bibr" rid="B22">Kumazaki et al., 2020</xref>). Additionally, children aged 1&#x2013;2 years with fewer preconceptions about robots may engage in more fundamental social interactions that do not rely on advanced conversation (<xref ref-type="bibr" rid="B40">Tanaka et al., 2007</xref>). <xref ref-type="bibr" rid="B40">Tanaka et al. (2007)</xref> used a small, simple-faced robot (QRIO) to investigate the qualitative changes over 4 months in interactions with children aged 18&#x2013;24 months. They found that robots with simpler appearances allow children to focus more on social cues, because processing excessive visual information can be challenging at this age (<xref ref-type="bibr" rid="B22">Kumazaki et al., 2020</xref>). Similarly, in the present study, the CommU&#x2019;s simple design may have allowed children to focus on social cues such as Praise and looking behaviors.</p>
<p>Communication with robots, which is less complex than that with humans, is processed at an &#x201c;intermediate difficulty&#x201d; level, making robot interactions more accessible to young children (<xref ref-type="bibr" rid="B10">Dubois-Sage et al., 2024</xref>). To further investigate these considerations, future research should examine the persistence of children aged 18&#x2013;24 months using robots other than the CommU.</p>
<p>One limitation of this study is that we could not conclusively determine whether the social reward of praise from CommU directly increased children&#x2019;s persistence. <xref ref-type="bibr" rid="B26">Lucca et al. (2019)</xref> argue that process-oriented praise teaches children the importance of their efforts. However, whether the effect observed in our study was due to process-oriented vocalizations or simply the presence of vocal praise remains unclear. Future research should include control conditions in which CommU provides meaningless utterances during tasks, to isolate the effects of praise as a social reward.</p>
<p>In addition, as in previous studies, predicting young children&#x2019;s responses was difficult; therefore, we did not implement contingent responses (<xref ref-type="bibr" rid="B38">Shiomi et al., 2021</xref>). In this study, praise was not provided contingently to avoid cases where the children assigned to the Praise condition did not engage with the disk and therefore would not receive praise. Prior research suggests that praise timing matters, and praise given during a behavior is more effective in increasing persistence than praise given afterward (<xref ref-type="bibr" rid="B33">Radovanovic et al., 2023</xref>). Future studies can compare a randomly moving robot with a robot that provides contingent praise based on children&#x2019;s responses.</p>
<p>Moreover, in this study, the experiments began only after confirming that the children had become familiar with CommU (e.g., a child-speaking CommU). However, previous research suggests that young children (aged 1&#x2013;2 years) may require more time to develop emotional bonds (<xref ref-type="bibr" rid="B1">Ahmad et al., 2019</xref>), a sense of closeness (<xref ref-type="bibr" rid="B18">Kose-Bagci et al., 2009</xref>), and psychological attribution (<xref ref-type="bibr" rid="B28">Nijssen et al., 2021</xref>; <xref ref-type="bibr" rid="B29">Okumura et al., 2023</xref>). Although few studies have explored the individual differences in children&#x2019;s interactions with robots, <xref ref-type="bibr" rid="B5">Baxter et al. (2017)</xref> highlighted the importance of these factors. <xref ref-type="bibr" rid="B41">Tolksdorf et al. (2021)</xref> found that shy children initially exhibit fewer positive reactions to robots than non-shy children, but become more comfortable over time. Therefore, future research should consider individual differences such as personality, temperament, and familiarity with robots, when examining task persistence.</p>
</sec>
<sec sec-type="conclusion" id="s5">
<label>5</label>
<title>Conclusion</title>
<p>This study offers initial evidence for a potential link between praise from CommU and task persistence in children aged 18&#x2013;24 months. We also found, for the first time, that longer looking toward the Agent was associated with greater task persistence in children, regardless of Agent type. Although caution is required when interpreting whether praise from a robot serves as a social reward equivalent to human praise (<xref ref-type="bibr" rid="B7">Dautenhahn, 2007</xref>), our findings suggest that CommU, as a simplified social agent, may be relevant to children&#x2019;s sensitivity to social signals at 1&#x2013;2 years of age. The results of this study point to the possibility that social robots may play a role in supporting aspects of early childhood engagement beyond the specific task-persistence behavior examined here. Future research should examine how the timing and contingency of robotic praise influence children&#x2019;s persistence and explore how social robots may contribute to early childhood play and learning in broader contexts.</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="s6">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec sec-type="ethics-statement" id="s7">
<title>Ethics statement</title>
<p>The studies involving humans were approved by Ethics Committee of Doshisha University (number: 20029). The studies were conducted in accordance with the local legislation and institutional requirements. Written informed consent for participation in this study was provided by the participants&#x2019; legal guardians/next of kin. Written informed consent was obtained from the individual(s), and minor(s)&#x2019; legal guardian/next of kin, for the publication of any potentially identifiable images or data included in this article.</p>
</sec>
<sec sec-type="author-contributions" id="s8">
<title>Author contributions</title>
<p>MI: Formal Analysis, Investigation, Methodology, Validation, Writing &#x2013; original draft, Writing &#x2013; review and editing, Data curation. YS: Conceptualization, Formal Analysis, Investigation, Methodology, Supervision, Validation, Writing &#x2013; original draft, Writing &#x2013; review and editing. YY: Resources, Supervision, Validation, Writing &#x2013; review and editing. HI: Resources, Supervision, Validation, Writing &#x2013; review and editing. SI: Funding acquisition, Investigation, Project administration, Resources, Supervision, Writing &#x2013; review and editing.</p>
</sec>
<ack>
<title>Acknowledgements</title>
<p>The authors are extremely grateful to the parents who completed the survey. We also thank Chiaki Kimura for her assistance.</p>
</ack>
<sec sec-type="COI-statement" id="s10">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="ai-statement" id="s11">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was not used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec sec-type="disclaimer" id="s12">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec sec-type="supplementary-material" id="s13">
<title>Supplementary Material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/frobt.2026.1782839/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/frobt.2026.1782839/full&#x23;supplementary-material</ext-link>
</p>
<supplementary-material xlink:href="Supplementaryfile1.pdf" id="SM1" mimetype="application/pdf" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ahmad</surname>
<given-names>M. I.</given-names>
</name>
<name>
<surname>Mubin</surname>
<given-names>O.</given-names>
</name>
<name>
<surname>Shahid</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Orlando</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Robot&#x2019;s adaptive emotional feedback sustains children&#x2019;s social engagement and promotes their vocabulary learning: a long-term child&#x2013;robot interaction study</article-title>. <source>Adapt. Behav.</source> <volume>27</volume>, <fpage>243</fpage>&#x2013;<lpage>266</lpage>. <pub-id pub-id-type="doi">10.1177/1059712319844182</pub-id>
</mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Akalin</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Kristoffersson</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Loutfi</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>The influence of feedback type in robot-assisted training</article-title>. <source>Multimodal Technol. Interact.</source> <volume>3</volume>, <fpage>67</fpage>. <pub-id pub-id-type="doi">10.3390/mti3040067</pub-id>
</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Anzalone</surname>
<given-names>S. M.</given-names>
</name>
<name>
<surname>Tilmont</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Boucenna</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Xavier</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Jouen</surname>
<given-names>A. L.</given-names>
</name>
<name>
<surname>Bodeau</surname>
<given-names>N.</given-names>
</name>
<etal/>
</person-group> (<year>2014</year>). <article-title>How children with autism spectrum disorder behave and explore the 4-dimensional (spatial 3D&#x2b; time) environment during a joint attention induction task with a robot</article-title>. <source>Res. Autism Spectr. Disord.</source> <volume>8</volume>, <fpage>814</fpage>&#x2013;<lpage>826</lpage>. <pub-id pub-id-type="doi">10.1016/j.rasd.2014.03.002</pub-id>
</mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bates</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Maechler</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Bolker</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Walker</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Christensen</surname>
<given-names>R. H. B.</given-names>
</name>
<name>
<surname>Singmann</surname>
<given-names>H.</given-names>
</name>
<etal/>
</person-group> (<year>2015</year>). <article-title>Package &#x201c;lme4&#x201d;</article-title>. <source>Convergence</source> <volume>12</volume>, <fpage>2</fpage>.</mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Baxter</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Ashurst</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Read</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Kennedy</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Belpaeme</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Robot education peers in a situated primary school study: personalisation promotes child learning</article-title>. <source>PLOS One</source> <volume>12</volume>, <fpage>e0178126</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0178126</pub-id>
<pub-id pub-id-type="pmid">28542648</pub-id>
</mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Belpaeme</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Vogt</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Van den Berghe</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Bergmann</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>G&#xf6;ksun</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>De Haas</surname>
<given-names>M.</given-names>
</name>
<etal/>
</person-group> (<year>2018</year>). <article-title>Guidelines for designing social robots as second language tutors</article-title>. <source>Int. J. Soc. Robot.</source> <volume>10</volume>, <fpage>325</fpage>&#x2013;<lpage>341</lpage>. <pub-id pub-id-type="doi">10.1007/s12369-018-0467-6</pub-id>
<pub-id pub-id-type="pmid">30996752</pub-id>
</mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Dautenhahn</surname>
<given-names>K.</given-names>
</name>
</person-group> (<year>2007</year>). <article-title>Socially intelligent robots: dimensions of human&#x2013;robot interaction</article-title>. <source>Philos. Trans. R. Soc. Lond. B Biol. Sci.</source> <volume>362</volume>, <fpage>679</fpage>&#x2013;<lpage>704</lpage>. <pub-id pub-id-type="doi">10.1098/rstb.2006.2004</pub-id>
<pub-id pub-id-type="pmid">17301026</pub-id>
</mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Davison</surname>
<given-names>D. P.</given-names>
</name>
<name>
<surname>Wijnen</surname>
<given-names>F. M.</given-names>
</name>
<name>
<surname>Charisi</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>van der Meij</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Reidsma</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Evers</surname>
<given-names>V.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Words of encouragement: how praise delivered by a social robot changes children&#x2019;s mindset for learning</article-title>. <source>J. Multimodal User Interfaces</source> <volume>15</volume>, <fpage>61</fpage>&#x2013;<lpage>76</lpage>. <pub-id pub-id-type="doi">10.1007/s12193-020-00353-9</pub-id>
</mixed-citation>
</ref>
<ref id="B9">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Deater&#x2010;Deckard</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Petrill</surname>
<given-names>S. A.</given-names>
</name>
<name>
<surname>Thompson</surname>
<given-names>L. A.</given-names>
</name>
<name>
<surname>DeThorne</surname>
<given-names>L. S.</given-names>
</name>
</person-group> (<year>2006</year>). <article-title>A longitudinal behavioral genetic analysis of task persistence</article-title>. <source>Dev. Sci.</source> <volume>9</volume>, <fpage>498</fpage>&#x2013;<lpage>504</lpage>. <pub-id pub-id-type="doi">10.1111/j.1467-7687.2006.00517.x</pub-id>
<pub-id pub-id-type="pmid">16911452</pub-id>
</mixed-citation>
</ref>
<ref id="B10">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Dubois-Sage</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Jacquet</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Jamet</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Baratgin</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>People with autism spectrum disorder could interact more easily with a robot than with a human: reasons and limits</article-title>. <source>Behav. Sci. (Basel)</source> <volume>14</volume>, <fpage>131</fpage>. <pub-id pub-id-type="doi">10.3390/bs14020131</pub-id>
<pub-id pub-id-type="pmid">38392485</pub-id>
</mixed-citation>
</ref>
<ref id="B11">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Fasola</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Matari&#x107;</surname>
<given-names>M. J.</given-names>
</name>
</person-group> (<year>2010</year>). &#x201c;<article-title>Robot motivator: increasing user enjoyment and performance on a physical/cognitive task</article-title>,&#x201d; in <source>9th international conference on development and learning IEEE</source>, <fpage>274</fpage>&#x2013;<lpage>279</lpage>. <pub-id pub-id-type="doi">10.1109/DEVLRN.2010.5578830</pub-id>
</mixed-citation>
</ref>
<ref id="B12">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fong</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Nourbakhsh</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Dautenhahn</surname>
<given-names>K.</given-names>
</name>
</person-group> (<year>2003</year>). <article-title>A survey of socially interactive robots</article-title>. <source>Robot. Auton. Syst.</source> <volume>42</volume>, <fpage>143</fpage>&#x2013;<lpage>166</lpage>. <pub-id pub-id-type="doi">10.1016/S0921-8890(02)00372-X</pub-id>
</mixed-citation>
</ref>
<ref id="B13">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fridin</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Kindergarten social assistive robot: first meeting and ethical issues</article-title>. <source>Comput. Hum. Behav.</source> <volume>30</volume>, <fpage>262</fpage>&#x2013;<lpage>272</lpage>. <pub-id pub-id-type="doi">10.1016/j.chb.2013.09.005</pub-id>
</mixed-citation>
</ref>
<ref id="B14">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hoffman</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Ju</surname>
<given-names>W.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Designing robots with movement in mind</article-title>. <source>J. Hum.-Robot Interact.</source> <volume>3</volume> (<issue>1</issue>), <fpage>89</fpage>&#x2013;<lpage>122</lpage>. <pub-id pub-id-type="doi">10.5898/JHRI.3.1.Hoffman</pub-id>
</mixed-citation>
</ref>
<ref id="B15">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Johal</surname>
<given-names>W.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Research trends in social robots for learning</article-title>. <source>Curr. Robot. Rep.</source> <volume>1</volume>, <fpage>75</fpage>&#x2013;<lpage>83</lpage>. <pub-id pub-id-type="doi">10.1007/s43154-020-00008-3</pub-id>
</mixed-citation>
</ref>
<ref id="B17">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kanda</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Hirano</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Eaton</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Ishiguro</surname>
<given-names>H.</given-names>
</name>
</person-group> (<year>2004</year>). <article-title>Interactive robots as social partners and peer tutors for children: a field trial</article-title>. <source>Hum. Comput. Interact.</source> <volume>19</volume>, <fpage>61</fpage>&#x2013;<lpage>84</lpage>. <pub-id pub-id-type="doi">10.1207/s15327051hci1901&#x26;2_4</pub-id>
</mixed-citation>
</ref>
<ref id="B18">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kose-Bagci</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Ferrari</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Dautenhahn</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Syrdal</surname>
<given-names>D. S.</given-names>
</name>
<name>
<surname>Nehaniv</surname>
<given-names>C. L.</given-names>
</name>
</person-group> (<year>2009</year>). <article-title>Effects of embodiment and gestures on social interaction in drumming games with a humanoid robot</article-title>. <source>Adv. Robot.</source> <volume>23</volume>, <fpage>1951</fpage>&#x2013;<lpage>1996</lpage>. <pub-id pub-id-type="doi">10.1163/016918609X12518783330360</pub-id>
</mixed-citation>
</ref>
<ref id="B19">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kumazaki</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Warren</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Swanson</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Yoshikawa</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Matsumoto</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Takahashi</surname>
<given-names>H.</given-names>
</name>
<etal/>
</person-group> (<year>2018a</year>). <article-title>Can robotic systems promote self-disclosure in adolescents with autism spectrum disorder? A pilot study</article-title>. <source>Front. Psychiatry</source> <volume>9</volume>, <fpage>36</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyt.2018.00036</pub-id>
<pub-id pub-id-type="pmid">29479324</pub-id>
</mixed-citation>
</ref>
<ref id="B20">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kumazaki</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Yoshikawa</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Yoshimura</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Ikeda</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Hasegawa</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Saito</surname>
<given-names>D. N.</given-names>
</name>
<etal/>
</person-group> (<year>2018b</year>). <article-title>The impact of robotic intervention on joint attention in children with autism spectrum disorders</article-title>. <source>Mol. Autism</source> <volume>9</volume>, <fpage>46</fpage>. <pub-id pub-id-type="doi">10.1186/s13229-018-0230-8</pub-id>
<pub-id pub-id-type="pmid">30202508</pub-id>
</mixed-citation>
</ref>
<ref id="B21">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kumazaki</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Muramatsu</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Yoshikawa</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Yoshimura</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Ikeda</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Hasegawa</surname>
<given-names>C.</given-names>
</name>
<etal/>
</person-group> (<year>2019</year>). <article-title>Brief report: a novel system to evaluate autism spectrum disorders using two humanoid robots</article-title>. <source>J. Autism Dev. Disord.</source> <volume>49</volume>, <fpage>1709</fpage>&#x2013;<lpage>1716</lpage>. <pub-id pub-id-type="doi">10.1007/s10803-018-3848-7</pub-id>
<pub-id pub-id-type="pmid">30539368</pub-id>
</mixed-citation>
</ref>
<ref id="B22">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kumazaki</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Muramatsu</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Yoshikawa</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Matsumoto</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Ishiguro</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Kikuchi</surname>
<given-names>M.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Optimal robot for intervention for individuals with autism spectrum disorders</article-title>. <source>Psychiatry Clin. Neurosci.</source> <volume>74</volume>, <fpage>581</fpage>&#x2013;<lpage>586</lpage>. <pub-id pub-id-type="doi">10.1111/pcn.13132</pub-id>
<pub-id pub-id-type="pmid">32827328</pub-id>
</mixed-citation>
</ref>
<ref id="B23">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Leite</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Martinho</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Paiva</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Social robots for long-term interaction: a survey</article-title>. <source>Int. J. Soc. Robot.</source> <volume>5</volume>, <fpage>291</fpage>&#x2013;<lpage>308</lpage>. <pub-id pub-id-type="doi">10.1007/s12369-013-0178-y</pub-id>
</mixed-citation>
</ref>
<ref id="B24">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Leonard</surname>
<given-names>J. A.</given-names>
</name>
<name>
<surname>Lee</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Schulz</surname>
<given-names>L. E.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Infants make more attempts to achieve a goal when they see adults persist</article-title>. <source>Science</source> <volume>357</volume>, <fpage>1290</fpage>&#x2013;<lpage>1294</lpage>. <pub-id pub-id-type="doi">10.1126/science.aan2317</pub-id>
<pub-id pub-id-type="pmid">28935806</pub-id>
</mixed-citation>
</ref>
<ref id="B25">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Leonard</surname>
<given-names>J. A.</given-names>
</name>
<name>
<surname>Duckworth</surname>
<given-names>A. L.</given-names>
</name>
<name>
<surname>Schulz</surname>
<given-names>L. E.</given-names>
</name>
<name>
<surname>Mackey</surname>
<given-names>A. P.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Leveraging cognitive science to foster children&#x2019;s persistence</article-title>. <source>Trends Cogn. Sci.</source> <volume>25</volume>, <fpage>642</fpage>&#x2013;<lpage>644</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2021.05.005</pub-id>
<pub-id pub-id-type="pmid">34074578</pub-id>
</mixed-citation>
</ref>
<ref id="B26">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lucca</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Horton</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Sommerville</surname>
<given-names>J. A.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Keep trying!: parental language predicts infants&#x2019; persistence</article-title>. <source>Cognition</source> <volume>193</volume>, <fpage>104025</fpage>. <pub-id pub-id-type="doi">10.1016/j.cognition.2019.104025</pub-id>
<pub-id pub-id-type="pmid">31325720</pub-id>
</mixed-citation>
</ref>
<ref id="B27">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mokrova</surname>
<given-names>I. L.</given-names>
</name>
<name>
<surname>O&#x2019;Brien</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Calkins</surname>
<given-names>S. D.</given-names>
</name>
<name>
<surname>Leerkes</surname>
<given-names>E. M.</given-names>
</name>
<name>
<surname>Marcovitch</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>The role of persistence at preschool age in academic skills at kindergarten</article-title>. <source>Eur. J. Psychol. Educ.</source> <volume>28</volume>, <fpage>1495</fpage>&#x2013;<lpage>1503</lpage>. <pub-id pub-id-type="doi">10.1007/s10212-013-0177-2</pub-id>
</mixed-citation>
</ref>
<ref id="B28">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Nijssen</surname>
<given-names>S. R.</given-names>
</name>
<name>
<surname>M&#xfc;ller</surname>
<given-names>B. C.</given-names>
</name>
<name>
<surname>Bosse</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Paulus</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>You, robot? The role of anthropomorphic emotion attributions in children&#x2019;s sharing with a robot</article-title>. <source>Int. J. Child-Comput. Interact.</source> <volume>30</volume>, <fpage>100319</fpage>. <pub-id pub-id-type="doi">10.1016/j.ijcci.2021.100319</pub-id>
</mixed-citation>
</ref>
<ref id="B29">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Okumura</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Hattori</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Fujita</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Kobayashi</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>A robot is watching me!: five&#x2010;year&#x2010;old children care about their reputation after interaction with a social robot</article-title>. <source>Child. Dev.</source> <volume>94</volume>, <fpage>865</fpage>&#x2013;<lpage>873</lpage>. <pub-id pub-id-type="doi">10.1111/cdev.13903</pub-id>
<pub-id pub-id-type="pmid">36752147</pub-id>
</mixed-citation>
</ref>
<ref id="B30">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Papadopoulos</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Lazzarino</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Miah</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Weaver</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Thomas</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Koulouglioti</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>A systematic review of the literature regarding socially assistive robots in pre-tertiary education</article-title>. <source>Comput. Educ.</source> <volume>155</volume>, <fpage>103924</fpage>. <pub-id pub-id-type="doi">10.1016/j.compedu.2020.103924</pub-id>
</mixed-citation>
</ref>
<ref id="B31">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pennisi</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Tonacci</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Tartarisco</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Billeci</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Ruta</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Gangemi</surname>
<given-names>S.</given-names>
</name>
<etal/>
</person-group> (<year>2016</year>). <article-title>Autism and social robotics: a systematic review</article-title>. <source>Autism Res.</source> <volume>9</volume>, <fpage>165</fpage>&#x2013;<lpage>183</lpage>. <pub-id pub-id-type="doi">10.1002/aur.1527</pub-id>
<pub-id pub-id-type="pmid">26483270</pub-id>
</mixed-citation>
</ref>
<ref id="B32">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Putnam</surname>
<given-names>S. P.</given-names>
</name>
<name>
<surname>Sehic</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>French</surname>
<given-names>B. F.</given-names>
</name>
<name>
<surname>Gartstein</surname>
<given-names>M. A.</given-names>
</name>
<name>
<surname>Lira Luttges</surname>
<given-names>B.</given-names>
</name>
</person-group>
<collab>486 Additional Partners in the Global Temperament Project</collab> (<year>2024</year>). <article-title>The global temperament project: parent-reported temperament in infants, toddlers, and children from 59 nations</article-title>. <source>Dev. Psychol.</source> <volume>60</volume>, <fpage>916</fpage>&#x2013;<lpage>941</lpage>. <pub-id pub-id-type="doi">10.1037/dev0001732</pub-id>
<pub-id pub-id-type="pmid">38573659</pub-id>
</mixed-citation>
</ref>
<ref id="B33">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Radovanovic</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Soldovieri</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Sommerville</surname>
<given-names>J. A.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>It takes two: process praise linking trying and success is associated with greater infant persistence</article-title>. <source>Dev. Psychol.</source> <volume>59</volume>, <fpage>1668</fpage>&#x2013;<lpage>1675</lpage>. <pub-id pub-id-type="doi">10.1037/dev0001584</pub-id>
<pub-id pub-id-type="pmid">37410443</pub-id>
</mixed-citation>
</ref>
<ref id="B34">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Rothbart</surname>
<given-names>M. K.</given-names>
</name>
<name>
<surname>Bates</surname>
<given-names>J. E.</given-names>
</name>
</person-group> (<year>2007</year>). &#x201c;<article-title>Temperament</article-title>,&#x201d; in <source>Handbook of child psychology</source>. Editors <person-group person-group-type="editor">
<name>
<surname>Damon</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Lerner</surname>
<given-names>R. M.</given-names>
</name>
</person-group> (<publisher-loc>Chichester</publisher-loc>: <publisher-name>John Wiley &#x26; Sons</publisher-name>). <pub-id pub-id-type="doi">10.1002/9780470147658.chpsy0303</pub-id>
</mixed-citation>
</ref>
<ref id="B35">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rothbart</surname>
<given-names>M. K.</given-names>
</name>
<name>
<surname>Ahadi</surname>
<given-names>S. A.</given-names>
</name>
<name>
<surname>Evans</surname>
<given-names>D. E.</given-names>
</name>
</person-group> (<year>2000</year>). <article-title>Temperament and personality: origins and outcomes</article-title>. <source>J. Pers. Soc. Psychol.</source> <volume>78</volume>, <fpage>122</fpage>&#x2013;<lpage>135</lpage>. <pub-id pub-id-type="doi">10.1037//0022-3514.78.1.122</pub-id>
<pub-id pub-id-type="pmid">10653510</pub-id>
</mixed-citation>
</ref>
<ref id="B36">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shinya</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Ishibashi</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Observing effortful adults enhances not perseverative but sustained attention in infants aged 12 months</article-title>. <source>Cogn. Dev.</source> <volume>64</volume>, <fpage>101255</fpage>. <pub-id pub-id-type="doi">10.1016/j.cogdev.2022.101255</pub-id>
</mixed-citation>
</ref>
<ref id="B37">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shiomi</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>A systematic survey of multiple social robots as a passive- and interactive-social medium</article-title>. <source>Adv. Robot.</source> <volume>38</volume>, <fpage>440</fpage>&#x2013;<lpage>454</lpage>. <pub-id pub-id-type="doi">10.1080/01691864.2023.2297900</pub-id>
</mixed-citation>
</ref>
<ref id="B38">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shiomi</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Tamura</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Kimoto</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Iio</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Akahane-Yamada</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Shimohara</surname>
<given-names>K.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Two is better than one: verification of the effect of praise from two robots on pre-school children&#x2019;s learning time</article-title>. <source>Adv. Robot.</source> <volume>35</volume>, <fpage>1132</fpage>&#x2013;<lpage>1144</lpage>. <pub-id pub-id-type="doi">10.1080/01691864.2021.1970019</pub-id>
</mixed-citation>
</ref>
<ref id="B39">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sommer</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Slaughter</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Wiles</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Owen</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Chiba</surname>
<given-names>A. A.</given-names>
</name>
<name>
<surname>Forster</surname>
<given-names>D.</given-names>
</name>
<etal/>
</person-group> (<year>2021</year>). <article-title>Can a robot teach me that? Children&#x2019;s ability to imitate robots</article-title>. <source>J. Exp. Child. Psychol.</source> <volume>203</volume>, <fpage>105040</fpage>. <pub-id pub-id-type="doi">10.1016/j.jecp.2020.105040</pub-id>
<pub-id pub-id-type="pmid">33302129</pub-id>
</mixed-citation>
</ref>
<ref id="B40">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tanaka</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Cicourel</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Movellan</surname>
<given-names>J. R.</given-names>
</name>
</person-group> (<year>2007</year>). <article-title>Socialization between toddlers and robots at an early childhood education center</article-title>. <source>Proc. Natl. Acad. Sci. U. S. A.</source> <volume>104</volume>, <fpage>17954</fpage>&#x2013;<lpage>17958</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.0707769104</pub-id>
<pub-id pub-id-type="pmid">17984068</pub-id>
</mixed-citation>
</ref>
<ref id="B41">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tolksdorf</surname>
<given-names>N. F.</given-names>
</name>
<name>
<surname>Viertel</surname>
<given-names>F. E.</given-names>
</name>
<name>
<surname>Rohlfing</surname>
<given-names>K. J.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Do shy preschoolers interact differently when learning language with a social robot? An analysis of interactional behavior and word learning</article-title>. <source>Front. Robot. AI</source> <volume>8</volume>, <fpage>676123</fpage>. <pub-id pub-id-type="doi">10.3389/frobt.2021.676123</pub-id>
<pub-id pub-id-type="pmid">34136535</pub-id>
</mixed-citation>
</ref>
<ref id="B42">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Uchida</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Takahashi</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Ban</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Shimaya</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Minato</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Ogawa</surname>
<given-names>K.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Japanese young women did not discriminate between robots and humans as listeners for their self-disclosure&#x2014;Pilot Study-</article-title>. <source>Multimodal Technol. Interact.</source> <volume>4</volume>, <fpage>35</fpage>. <pub-id pub-id-type="doi">10.3390/mti4030035</pub-id>
</mixed-citation>
</ref>
<ref id="B43">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>van Straten</surname>
<given-names>C. L.</given-names>
</name>
<name>
<surname>Peter</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>K&#xfc;hne</surname>
<given-names>R.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Child&#x2013;robot relationship formation: a narrative review of empirical research</article-title>. <source>Int. J. Soc. Robot.</source> <volume>12</volume>, <fpage>325</fpage>&#x2013;<lpage>344</lpage>. <pub-id pub-id-type="doi">10.1007/s12369-019-00569-0</pub-id>
<pub-id pub-id-type="pmid">32454901</pub-id>
</mixed-citation>
</ref>
<ref id="B44">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Westlund</surname>
<given-names>J. M. K.</given-names>
</name>
<name>
<surname>Park</surname>
<given-names>H. W.</given-names>
</name>
<name>
<surname>Williams</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Breazeal</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2018</year>). &#x201c;<article-title>Measuring young children&#x2019;s long-term relationships with social robots</article-title>,&#x201d; in <source>Proceedings of the 17th ACM Conference on Interaction Design and Children</source>, <fpage>207</fpage>&#x2013;<lpage>218</lpage>. <pub-id pub-id-type="doi">10.1145/3202185.3202732</pub-id>
</mixed-citation>
</ref>
<ref id="B45">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yu</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Roque</surname>
<given-names>R.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>A review of computational toys and kits for young children</article-title>. <source>Int. J. Child-Comput. Interact.</source> <volume>21</volume>, <fpage>17</fpage>&#x2013;<lpage>36</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijcci.2019.04.001</pub-id>
</mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn fn-type="custom" custom-type="edited-by">
<p>
<bold>Edited by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2160877/overview">Karolina Eszter Kov&#xe1;cs</ext-link>, University of Debrecen, Hungary</p>
</fn>
<fn fn-type="custom" custom-type="reviewed-by">
<p>
<bold>Reviewed by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1028490/overview">Paolo Pagliuca</ext-link>, National Research Council (CNR), Italy</p>
<p>
<ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2697611/overview">Abdul Aziz Saefudin</ext-link>, PGRI University of Yogyakarta, Indonesia</p>
</fn>
</fn-group>
</back>
</article>