<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article article-type="discussion" dtd-version="2.3" xml:lang="EN" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Robot. AI</journal-id>
<journal-title>Frontiers in Robotics and AI</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Robot. AI</abbrev-journal-title>
<issn pub-type="epub">2296-9144</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">633514</article-id>
<article-id pub-id-type="doi">10.3389/frobt.2021.633514</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Robotics and AI</subject>
<subj-group>
<subject>Opinion</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Can You Activate Me? From Robots to Human Brain</article-title>
<alt-title alt-title-type="left-running-head">Manzi et al.</alt-title>
<alt-title alt-title-type="right-running-head">From Robots to Human Brain</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Manzi</surname>
<given-names>F.</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="http://loop.frontiersin.org/people/375129/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Di Dio</surname>
<given-names>C.</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<uri xlink:href="http://loop.frontiersin.org/people/26642/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Di Lernia</surname>
<given-names>D.</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<uri xlink:href="http://loop.frontiersin.org/people/337732/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Rossignoli</surname>
<given-names>D.</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
<xref ref-type="aff" rid="aff5">
<sup>5</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1227176/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Maggioni</surname>
<given-names>M. A.</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
<xref ref-type="aff" rid="aff5">
<sup>5</sup>
</xref>
<uri xlink:href="http://loop.frontiersin.org/people/1152848/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Massaro</surname>
<given-names>D.</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<uri xlink:href="http://loop.frontiersin.org/people/296104/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Marchetti</surname>
<given-names>A.</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<uri xlink:href="http://loop.frontiersin.org/people/124362/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Riva</surname>
<given-names>G.</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="aff" rid="aff6">
<sup>6</sup>
</xref>
<uri xlink:href="http://loop.frontiersin.org/people/126074/overview"/>
</contrib>
</contrib-group>
<aff id="aff1">
<label>
<sup>1</sup>
</label>Research Unit on Theory of Mind, Department of Psychology, Universit&#xe0; Cattolica del Sacro Cuore, <addr-line>Milan</addr-line>, <country>Italy</country>
</aff>
<aff id="aff2">
<label>
<sup>2</sup>
</label>Humane Technology Lab, Universit&#xe0; Cattolica del Sacro Cuore, <addr-line>Milan</addr-line>, <country>Italy</country>
</aff>
<aff id="aff3">
<label>
<sup>3</sup>
</label>Department of Psychology, Universit&#xe0; Cattolica del Sacro Cuore, <addr-line>Milan</addr-line>, <country>Italy</country>
</aff>
<aff id="aff4">
<label>
<sup>4</sup>
</label>DISEIS, Department of International Economics, Institutions and Development, Universit&#xe0; Cattolica del Sacro Cuore, <addr-line>Milan</addr-line>, <country>Italy</country>
</aff>
<aff id="aff5">
<label>
<sup>5</sup>
</label>CSCC, Cognitive Science and Communication research Center, Universit&#xe0; Cattolica del Sacro Cuore, <addr-line>Milan</addr-line>, <country>Italy</country>
</aff>
<aff id="aff6">
<label>
<sup>6</sup>
</label>Applied Technology for NeuroPsychology Laboratory, Istituto Auxologico Italiano, <addr-line>Milan</addr-line>, <country>Italy</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>
<bold>Edited by:</bold>
<ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/363892">Ginevra Castellano</ext-link>, Uppsala University, Sweden</p>
</fn>
<fn fn-type="edited-by">
<p>
<bold>Reviewed by:</bold>
<ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/61814">Maria Alessandra Umilta</ext-link>, University of Parma, Italy</p>
</fn>
<corresp id="c001">&#x2a;Correspondence: F. Manzi, <email>federico.manzi@unicatt.it</email>
</corresp>
<fn fn-type="other">
<p>This article was submitted to Human-Robot Interaction, a section of the journal Frontiers in Robotics and AI</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>19</day>
<month>02</month>
<year>2021</year>
</pub-date>
<pub-date pub-type="collection">
<year>2021</year>
</pub-date>
<volume>8</volume>
<elocation-id>633514</elocation-id>
<history>
<date date-type="received">
<day>25</day>
<month>11</month>
<year>2020</year>
</date>
<date date-type="accepted">
<day>15</day>
<month>01</month>
<year>2021</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2021 Manzi, Di Dio, Di Lernia, Rossignoli, Maggioni, Massaro, Marchetti and Riva.</copyright-statement>
<copyright-year>2021</copyright-year>
<copyright-holder>Manzi, Di Dio, Di Lernia, Rossignoli, Maggioni, Massaro, Marchetti and Riva</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<kwd-group>
<kwd>human-robot interaction</kwd>
<kwd>social brain</kwd>
<kwd>long-term interaction</kwd>
<kwd>fMRI</kwd>
<kwd>EEG</kwd>
</kwd-group>
</article-meta>
</front>
<body>
<p>The effectiveness of social robots has been widely recognized in different contexts of humans&#x2019; daily life, but still little is known about the brain areas activated by observing or interacting with a robot. Research combining neuroscience, cognitive science and robotics can provide new insights into both the functioning of our brain and the implementation of robots. Behavioural studies on social robots have shown that the social perception of robots is influenced by at least two factors: physical appearance and behavior (<xref ref-type="bibr" rid="B13">Marchetti et al., 2018</xref>). How can neuroscience explain such findings? To date, studies have been conducted through the use of both EEG and fMRI techniques to investigate the brain areas involved in human-robot interaction. These studies have mainly addressed brain activations in response to paradigms involving either action performance or stimuli charged with an emotional component (<xref ref-type="fig" rid="F1">Figure 1</xref>).</p>
<p>A first set of studies analysed the effect of different types of robots varying in their level of physical anthropomorphism on the activation of the Mirror Neuron Mechanism (MNM). The neuronal activities examined through fMRI indicated that the activation of medial prefrontal cortex (MPFC) increased linearly with the degree of human-likeness of the robots, from the most mechanical to android ones (<xref ref-type="bibr" rid="B12">Krach et al., 2008</xref>). Electroencephalography (EEG) data associated with the mu wave&#x2013;related to the MNM&#x2013;showed a modulation of the mu rhythm as a function of the robotic agent&#x2019;s resemblance to the human (<xref ref-type="bibr" rid="B22">Urgen et al., 2013</xref>; <xref ref-type="bibr" rid="B14">Matsuda et al., 2016</xref>). Furthermore, the fMRI findings on MNM indicated that the premotor cortex is similarly activated when actions are performed by different types of robots (more mechanical or android) (<xref ref-type="bibr" rid="B18">Saygin et al., 2012</xref>).</p>
<p>This evidence supports the hypothesis that the premotor cortex is &#x201c;automatically&#x201d; triggered in response to both simple and complex goal-directed and intentional actions, revealing a sensitivity to both the living and non-living ontological status of the agent (<xref ref-type="bibr" rid="B8">Gazzola et al., 2007</xref>; <xref ref-type="bibr" rid="B18">Saygin et al., 2012</xref>). Activation of the premotor cortex was also found in response to a human or robotic face expressing emotions (<xref ref-type="bibr" rid="B3">Chaminade et al., 2010</xref>). Several studies in humans have found that the premotor cortex is involved in the process of emotion recognition by encoding the motor pattern, (i.e. facial expression) that characterizes a given emotional state. The visuo-motor information processed in premotor cortex is translated into affective information by means of the insula that acts as a relay station between the cortical and subcortical areas, such as the amygdala, involved in processing emotional stimuli, (e.g. <xref ref-type="bibr" rid="B1">Carr et al., 2003</xref>; <xref ref-type="bibr" rid="B23">Wicker et al., 2003</xref>; <xref ref-type="bibr" rid="B10">Iacoboni, 2009</xref>). Likewise, the parieto-prefrontal network characterizing the MNM has been found to be particularly sensitive to biological movement, (e.g. <xref ref-type="bibr" rid="B5">Dayan et al., 2007</xref>; <xref ref-type="bibr" rid="B2">Casile et al., 2009</xref>; <xref ref-type="bibr" rid="B7">Di Dio et al., 2013</xref>). Accordingly, it was demonstrated that observing a motor or emotional behaviour performed by a human-like robotic agent, resembling the human kinematics, may be sufficient to activate MNM (<xref ref-type="bibr" rid="B8">Gazzola et al., 2007</xref>; <xref ref-type="bibr" rid="B3">Chaminade et al., 2010</xref>). Additionally, investigating the vitality forms of movement, which characterize the style of an action, (e.g. rude <italic>vs</italic>. 
gentle) (<xref ref-type="bibr" rid="B20">Stern, 1985</xref>, <xref ref-type="bibr" rid="B19">Stern, 2010</xref>), it was shown that, besides the activation of the MNM, vitality forms activate also the dorso-central insular cortex (<xref ref-type="bibr" rid="B7">Di Dio et al., 2013</xref>; <xref ref-type="bibr" rid="B26">Di Cesare et al., 2016</xref>), which represents the relay through which information about the action style, (i.e. action kinematics) processed in the parietal MNM is invested with an affective quality. Most importantly, very recent neuroscientific evidence has shown that the same brain areas whose activation is stimulated by human vitality forms can be also evoked by robots&#x2019; actions performed by simulating human kinematics (<xref ref-type="bibr" rid="B6">Di Cesare et al., 2020</xref>), thus conveying information about the robot&#x2019;s &#x201c;emotion state&#x201d;.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption>
<p>Robots can activate the human brain.</p>
</caption>
<graphic xlink:href="frobt-08-633514-g001.tif"/>
</fig>
<p>However, the activation of other brain areas besides the MNM, such as ventral visual areas, may be required to accommodate the robot&#x2019;s inconsistent kinematics associated with simple <italic>vs</italic>. complex goal-directed actions (<xref ref-type="bibr" rid="B8">Gazzola et al., 2007</xref>). Similarly, fMRI data showed a greater activation of posterior occipital and temporal visual cortices in response to facial expression of robot emotions compared to human emotions, reflecting a further level of processing in response to the unfamiliar stimulus, (i.e. the face of the robot) (<xref ref-type="bibr" rid="B3">Chaminade et al., 2010</xref>; <xref ref-type="bibr" rid="B11">Jung et al., 2016</xref>). Additionally, the increase in frontal theta activity&#x2013;associated with the recovery from long-term memory&#x2013;measured through EEG is greater for a mechanical robot than a human or android (<xref ref-type="bibr" rid="B22">Urgen et al., 2013</xref>), highlighting once more the involvement of a compensation process for the analysis of robot stimuli. More specifically, this finding indicates that a lower level of physical robot anthropomorphism requires more resources from memory systems to bridge the semantic gap between the agent and its action (<xref ref-type="bibr" rid="B22">Urgen et al., 2013</xref>).</p>
<p>People&#x27;s sense of affiliation with a robot during interactions is at least partially explained by the emotional responses to the robot&#x27;s behaviour. Still, few studies have analysed the brain activation in response to the emotions expressed by robots. EEG data suggest that people can recognize the bodily emotions expressed by a robot, including joy and sadness, although not all the expressed emotions elicit a significant brain response in the viewer (<xref ref-type="bibr" rid="B9">Guo et al., 2019</xref>). Additionally, fMRI data indicate that emotional expressions, (i.e. joy, anger and disgust) are perceived as more emotional when expressed by a human face than by a robot (<xref ref-type="bibr" rid="B3">Chaminade et al., 2010</xref>). As argued above, these differences could be explained by a non-perfect alignment between the robot and human kinematics expressing the emotional quality of movement.</p>
<p>Additional studies have investigated neural activation patterns related to emotional reactions when people observe a robot or a human in a violent situation. The fMRI data showed no differences in activation patterns in areas of emotional resonance when a violent action was experienced by a human or robot (<xref ref-type="bibr" rid="B16">Rosenthal-von der P&#xfc;tten et al., 2014</xref>).</p>
<p>
<xref ref-type="bibr" rid="B21">Suzuki et al., (2015)</xref> found a similar brain response measured through EEG when observing images showing either a finger of a robotic hand or a human hand getting cut with scissors. In particular, the authors found an increased neural response in the ascending phase, (i.e. 350&#x2013;500&#xa0;ms after stimulus onset) of the P3 component at the frontal-central electrodes by painful human stimuli but not painful robot stimuli, although the difference was only marginal; in contrast, no differences were found for empathy directed toward humans and robots in the descending phase of P3, (i.e. 500&#x2013;650&#xa0;ms after stimulus onset). Based on these results, the authors suggest that humanity of the observed agent (human <italic>vs</italic>. robot) partially modulates the top-down controlled processes of empathy for pain (<xref ref-type="bibr" rid="B21">Suzuki et al., 2015</xref>), possibly also due to a greater difficulty in taking the robot&#x2019;s perspective compared to the human one (<xref ref-type="bibr" rid="B21">Suzuki et al., 2015</xref>). In this context, it is important to underline that these pioneering studies on empathy are quite heterogeneous with respect to both the techniques adopted, and the stimuli used, which vary greatly both in terms of the type of robotic agent and experimental paradigm.</p>
<p>To sum up, our brain systems respond in an &#x201c;embodied&#x201d; fashion to the observation of experimental conditions involving the actions of a robot with biological or semi-biological dynamics. However, we suggest that this effect is only transitory or anyway limited to experimental settings. Our consideration is supported by the results by Cross and colleagues (<xref ref-type="bibr" rid="B4">Cross et al., 2019</xref>) indicating that a period of real interaction with a social robot can disambiguate its ontological status, thus repositioning the robot in the &#x201c;non-living category&#x201d;. This may be plausibly explained by the activation of top-down cognitive mechanisms that regulate the activity of our brain and that highlight the emergence of differences between the brain response to the human <italic>vs</italic>. robot stimuli. In other words, the automatic activation of embodied mechanisms mediated by the MNM when we observe a robot performing actions or experiencing particular human-like emotional states, (e.g. violence or pain) is facilitated in a first &#x201c;encounter&#x201d; with the robot, also given our natural tendency to anthropomorphize many different entities. Prior experience with the robot&#x2019;s actual physical and psychological limits, on the other hand, provides us with a contextual frame of reference whereby top-down processes would modulate or inhibit the response of automatic mechanisms (<xref ref-type="bibr" rid="B15">Paetzel et al., 2020</xref>; <xref ref-type="bibr" rid="B17">Rossi et al., 2020</xref>). Concluding, although further studies are necessary, we can state that the level of physical anthropomorphism, the type and kinematics of the actions performed by robots jointly activate the social brain areas, consequently increasing the perception of robots as social partners. 
The use of additional techniques such as Virtual Reality could also prove effective in this respect (<xref ref-type="bibr" rid="B24">Riva et al., 2018</xref>; <xref ref-type="bibr" rid="B25">Riva et al., 2019</xref>).</p>
</body>
<back>
<sec id="s1">
<title>Author Contributions</title>
<p>FM conceived the idea. FM, DDL and DR selected the articles. FM, CD, and GR finalised the idea. All authors contributed to the article.</p>
</sec>
<sec id="s2">
<title>Funding</title>
<p>This work has been supported by Universit&#xe0; Cattolica del Sacro Cuore, Milan, (D3.2 &#x2010; 2018 &#x2010; Human&#x2010;Robot Confluence project).</p>
</sec>
<sec sec-type="COI-statement" id="s3">
<title>Conflict of Interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Carr</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Iacoboni</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Dubeau</surname>
<given-names>M. C.</given-names>
</name>
<name>
<surname>Mazziotta</surname>
<given-names>J. C.</given-names>
</name>
<name>
<surname>Lenzi</surname>
<given-names>G. L.</given-names>
</name>
</person-group> (<year>2003</year>). <article-title>Neural mechanisms of empathy in humans: a relay from neural systems for imitation to limbic areas</article-title>. <source>Proc. Natl. Acad. Sci. U.S.A.</source> <volume>100</volume> (<issue>9</issue>), <fpage>5497</fpage>&#x2013;<lpage>5502</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.0935845100</pub-id> </citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Casile</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Dayan</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Caggiano</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Hendler</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Flash</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Giese</surname>
<given-names>M. A.</given-names>
</name>
</person-group> (<year>2009</year>). <article-title>Neuronal encoding of human kinematic invariants during action observation</article-title>. <source>Cerebr. Cortex</source> <volume>20</volume>, <fpage>1647</fpage>&#x2013;<lpage>1655</lpage>. <pub-id pub-id-type="doi">10.1093/cercor/bhp229</pub-id> </citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chaminade</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Zecca</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Blakemore</surname>
<given-names>S. J.</given-names>
</name>
<name>
<surname>Takanishi</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Frith</surname>
<given-names>C. D.</given-names>
</name>
<name>
<surname>Micera</surname>
<given-names>S.</given-names>
</name>
<etal/>
</person-group> (<year>2010</year>). <article-title>Brain response to a humanoid robot in areas implicated in the perception of human emotional gestures</article-title>. <source>PloS One</source>, <volume>5</volume> (<issue>7</issue>), <fpage>e11577</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0011577</pub-id> </citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cross</surname>
<given-names>E. S.</given-names>
</name>
<name>
<surname>Hortensius</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Wykowska</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>From social brains to social robots: applying neurocognitive insights to human&#x2013;robot interaction</article-title>. <source>Phil. Trans. Biol. Sci.</source> <volume>374</volume> (<issue>1771</issue>), <fpage>20180024</fpage>. <pub-id pub-id-type="doi">10.1098/rstb.2018.0024</pub-id> </citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Dayan</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Casile</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Levit-Binnun</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Giese</surname>
<given-names>M. A.</given-names>
</name>
<name>
<surname>Hendler</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Flash</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2007</year>). <article-title>Neural representations of kinematic laws of motion: evidence for action-perception coupling</article-title>. <source>Proc. Natl. Acad. Sci. U.S.A.</source> <volume>104</volume> (<issue>51</issue>), <fpage>20582</fpage>&#x2013;<lpage>20587</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.0710033104</pub-id> </citation>
</ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Di Cesare</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Valente</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Di Dio</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Ruffaldi</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Bergamasco</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Goebel</surname>
<given-names>R.</given-names>
</name>
<etal/>
</person-group> (<year>2016</year>). <article-title>Vitality forms processing in the insula during action observation: a multivoxel pattern analysis</article-title>. <source>Front. Hum. Neurosci.</source>, <volume>10</volume>, <fpage>267</fpage>. <pub-id pub-id-type="doi">10.3389/fnhum.2016.00267</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Di Cesare</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Vannucci</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Rea</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Sciutti</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Sandini</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>How attitudes generated by humanoid robots shape human brain activity</article-title>. <source>Sci. Rep.</source>, <volume>10</volume> (<issue>1</issue>), <fpage>16928</fpage>. <pub-id pub-id-type="doi">10.1038/s41598-020-73728-3</pub-id> </citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Di Dio</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Di Cesare</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Higuchi</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Roberts</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Vogt</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Rizzolatti</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>The neural correlates of velocity processing during the observation of a biological effector in the parietal and premotor cortex</article-title>. <source>Neuroimage</source> <volume>64</volume>, <fpage>425</fpage>&#x2013;<lpage>436</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2012.09.026</pub-id> </citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gazzola</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Rizzolatti</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Wicker</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Keysers</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2007</year>). <article-title>The anthropomorphic brain: the mirror neuron system responds to human and robotic actions</article-title>. <source>Neuroimage</source> <volume>35</volume> (<issue>4</issue>), <fpage>1674</fpage>&#x2013;<lpage>1684</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2007.02.003</pub-id> </citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Guo</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Qu</surname>
<given-names>Q.</given-names>
</name>
<name>
<surname>Duffy</surname>
<given-names>V. G.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>The effect of a humanoid robot&#x2019;s emotional behaviors on users&#x2019; emotional responses: evidence from pupillometry and electroencephalography measures</article-title>. <source>Int. J. Hum. Comput. Interact.</source>, <volume>35</volume> (<issue>20</issue>), <fpage>1947</fpage>&#x2013;<lpage>1959</lpage>. <pub-id pub-id-type="doi">10.1080/10447318.2019.1587938</pub-id> </citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Iacoboni</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2009</year>). <article-title>Imitation, empathy, and mirror neurons</article-title>. <source>Annu. Rev. Psychol.</source> <volume>60</volume>, <fpage>653</fpage>&#x2013;<lpage>670</lpage>. <pub-id pub-id-type="doi">10.1146/annurev.psych.60.110707.163604</pub-id> </citation>
</ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jung</surname>
<given-names>C. E.</given-names>
</name>
<name>
<surname>Strother</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Feil-Seifer</surname>
<given-names>D. J.</given-names>
</name>
<name>
<surname>Hutsler</surname>
<given-names>J. J.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Atypical asymmetry for processing human and robot faces in autism revealed by fNIRS</article-title>. <source>PloS One</source> <volume>11</volume> (<issue>7</issue>), <fpage>e0158804</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0158804</pub-id> </citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Krach</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Hegel</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Wrede</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Sagerer</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Binkofski</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Kircher</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2008</year>). <article-title>Can machines think? Interaction and perspective taking with robots investigated via fMRI</article-title>. <source>PloS One</source>, <volume>3</volume> (<issue>7</issue>), <fpage>e2597</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0002597</pub-id> </citation>
</ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Marchetti</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Manzi</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Itakura</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Massaro</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Theory of mind and humanoid robots from a lifespan perspective</article-title>. <source>Zeitschrift F&#xfc;r Psychologie</source> <volume>226</volume> (<issue>2</issue>), <fpage>98</fpage>&#x2013;<lpage>109</lpage>. <pub-id pub-id-type="doi">10.1027/2151-2604/a000326</pub-id> </citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Matsuda</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Hiraki</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Ishiguro</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Matsuda</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Hiraki</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Ishiguro</surname>
<given-names>H.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>EEG-based mu rhythm suppression to measure the effects of appearance and motion on perceived human likeness of a robot</article-title>. <source>J. of Human-Robot Interaction</source> <volume>5</volume> (<issue>1</issue>), <fpage>68</fpage>&#x2013;<lpage>81</lpage>. <pub-id pub-id-type="doi">10.5898/JHRI.5.1.Matsuda</pub-id> </citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Paetzel</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Perugia</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Castellano</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2020</year>). &#x201c;<article-title>The persistence of first impressions: the effect of repeated interactions on the perception of a social robot</article-title>,&#x201d; in <conf-name>Proceedings of the 2020 ACM/IEEE International Conference on Human-Robot Interaction</conf-name>, <conf-date>24&#x2010;26 March 2020</conf-date>, <conf-loc>Cambridge, UK</conf-loc>, <fpage>73</fpage>&#x2013;<lpage>82</lpage>. <pub-id pub-id-type="doi">10.1145/3319502.3374786</pub-id> </citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Riva</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Wiederhold</surname>
<given-names>B. K.</given-names>
</name>
<name>
<surname>Chirico</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Di Lernia</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Mantovani</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Gaggioli</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Brain and virtual reality: what do they have in common and how to exploit their potential</article-title>. <source>Annu. Rev. Cyberther. Telemed.</source>, <volume>16</volume>, <fpage>3</fpage>&#x2013;<lpage>7</lpage>.</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Riva</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Wiederhold</surname>
<given-names>B. K.</given-names>
</name>
<name>
<surname>Di Lernia</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Chirico</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Riva</surname>
<given-names>E. F. M.</given-names>
</name>
<name>
<surname>Mantovani</surname>
<given-names>F.</given-names>
</name>
<etal/>
</person-group> (<year>2019</year>). <article-title>Virtual reality meets artificial intelligence: the emergence of advanced digital therapeutics and digital biomarkers</article-title>. <source>Annu. Rev. Cyberther. Telemed.</source>, <volume>17</volume>, <fpage>3</fpage>&#x2013;<lpage>7</lpage>.</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rosenthal-von der P&#xfc;tten</surname>
<given-names>A. M.</given-names>
</name>
<name>
<surname>Schulte</surname>
<given-names>F. P.</given-names>
</name>
<name>
<surname>Eimler</surname>
<given-names>S. C.</given-names>
</name>
<name>
<surname>Sobieraj</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Hoffmann</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Maderwald</surname>
<given-names>S.</given-names>
</name>
<etal/>
</person-group> (<year>2014</year>). <article-title>Investigations on empathy towards humans and robots using fMRI</article-title>. <source>Comput. Hum. Behav.</source> <volume>33</volume>, <fpage>201</fpage>&#x2013;<lpage>212</lpage>. <pub-id pub-id-type="doi">10.1016/j.chb.2014.01.004</pub-id> </citation>
</ref>
<ref id="B17">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Rossi</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Dautenhahn</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Koay</surname>
<given-names>K. L.</given-names>
</name>
<name>
<surname>Walters</surname>
<given-names>M. L.</given-names>
</name>
<name>
<surname>Holthaus</surname>
<given-names>P.</given-names>
</name>
</person-group> (<year>2020</year>). &#x201c;<article-title>Evaluating people&#x2019;s perceptions of trust in a robot in a repeated interactions study</article-title>,&#x201d; in <source>Social robotics</source>. Editors <person-group person-group-type="editor">
<name>
<surname>Wagner</surname>
<given-names>A. R.</given-names>
</name>
<name>
<surname>Feil-Seifer</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Haring</surname>
<given-names>K. S.</given-names>
</name>
<name>
<surname>Rossi</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Williams</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>He</surname>
<given-names>H.</given-names>
</name>
<etal/>
</person-group> (<publisher-loc>New York, NY</publisher-loc>: <publisher-name>Springer International Publishing</publisher-name>), <fpage>453</fpage>&#x2013;<lpage>465</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-030-62056-1_38</pub-id> </citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Saygin</surname>
<given-names>A. P.</given-names>
</name>
<name>
<surname>Chaminade</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Ishiguro</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Driver</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Frith</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>The thing that should not be: predictive coding and the uncanny valley in perceiving human and humanoid robot actions</article-title>. <source>Soc. Cognit. Affect Neurosci.</source> <volume>7</volume> (<issue>4</issue>), <fpage>413</fpage>&#x2013;<lpage>422</lpage>. <pub-id pub-id-type="doi">10.1093/scan/nsr025</pub-id> </citation>
</ref>
<ref id="B19">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Stern</surname>
<given-names>D. N.</given-names>
</name>
</person-group> (<year>2010</year>). <source>Forms of vitality</source>. <publisher-loc>Oxford, England</publisher-loc>: <publisher-name>Oxford University Press</publisher-name>.</citation>
</ref>
<ref id="B20">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Stern</surname>
<given-names>D. N.</given-names>
</name>
</person-group> (<year>1985</year>). <source>The interpersonal world of the infant</source>. <publisher-loc>New York, NY</publisher-loc>: <publisher-name>Basic Books</publisher-name>.</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Suzuki</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Galli</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Ikeda</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Itakura</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Kitazaki</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Measuring empathy for human and robot hand pain using electroencephalography</article-title>. <source>Sci. Rep.</source> <volume>5</volume> (<issue>1</issue>), <fpage>15924</fpage>. <pub-id pub-id-type="doi">10.1038/srep15924</pub-id> </citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Urgen</surname>
<given-names>B. A.</given-names>
</name>
<name>
<surname>Plank</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Ishiguro</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Poizner</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Saygin</surname>
<given-names>A. P.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>EEG theta and Mu oscillations during perception of human and robot actions</article-title>. <source>Front. Neurorob.</source> <volume>7</volume>, <fpage>19</fpage>. <pub-id pub-id-type="doi">10.3389/fnbot.2013.00019</pub-id> </citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wicker</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Keysers</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Plailly</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Royet</surname>
<given-names>J. P.</given-names>
</name>
<name>
<surname>Gallese</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Rizzolatti</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2003</year>). <article-title>Both of us disgusted in My insula: the common neural basis of seeing and feeling disgust</article-title>. <source>Neuron</source>, <volume>40</volume> (<issue>3</issue>), <fpage>655</fpage>&#x2013;<lpage>664</lpage>. <pub-id pub-id-type="doi">10.1016/s0896-6273(03)00679-2</pub-id> </citation>
</ref>
</ref-list>
</back>
</article>