<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article article-type="editorial" dtd-version="1.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Robot. AI</journal-id>
<journal-title-group>
<journal-title>Frontiers in Robotics and AI</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Robot. AI</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">2296-9144</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1797990</article-id>
<article-id pub-id-type="doi">10.3389/frobt.2026.1797990</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Editorial</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Editorial: Intelligent assistants for all</article-title>
<alt-title alt-title-type="left-running-head">Mandischer et al.</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2026.1797990">10.3389/frobt.2026.1797990</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Mandischer</surname>
<given-names>Nils</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2616034"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing - original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Kraus</surname>
<given-names>Matthias</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2636857"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Zhong</surname>
<given-names>Junpei</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/75048"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Tapus</surname>
<given-names>Adriana</given-names>
</name>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/571406"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
</contrib-group>
<aff id="aff1">
<label>1</label>
<institution>Mechatronics, University of Augsburg</institution>, <city>Augsburg</city>, <country country="DE">Germany</country>
</aff>
<aff id="aff2">
<label>2</label>
<institution>Human-Centered Artificial Intelligence, University of Augsburg</institution>, <city>Augsburg</city>, <country country="DE">Germany</country>
</aff>
<aff id="aff3">
<label>3</label>
<institution>Department of Digital Innovation and Technology, Technological and Higher Education Institute of Hong Kong</institution>, <city>Hong Kong</city>, <country country="HK">Hong Kong SAR, China</country>
</aff>
<aff id="aff4">
<label>4</label>
<institution>ENSTA, Institut Polytechnique de Paris</institution>, <city>Palaiseau</city>, <country country="FR">France</country>
</aff>
<author-notes>
<corresp id="c001">
<label>&#x2a;</label>Correspondence: Nils Mandischer, <email xlink:href="mailto:nils.mandischer@uni-a.de">nils.mandischer@uni-a.de</email>
</corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-17">
<day>17</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2026</year>
</pub-date>
<volume>13</volume>
<elocation-id>1797990</elocation-id>
<history>
<date date-type="received">
<day>28</day>
<month>01</month>
<year>2026</year>
</date>
<date date-type="rev-recd">
<day>28</day>
<month>01</month>
<year>2026</year>
</date>
<date date-type="accepted">
<day>29</day>
<month>01</month>
<year>2026</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2026 Mandischer, Kraus, Zhong and Tapus.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Mandischer, Kraus, Zhong and Tapus</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-17">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<kwd-group>
<kwd>assistive robotics</kwd>
<kwd>care robotics</kwd>
<kwd>elderly people</kwd>
<kwd>human-autonomy teaming</kwd>
<kwd>human-machine interaction</kwd>
<kwd>human-robot teaming</kwd>
<kwd>people with disabilities</kwd>
<kwd>social robotics</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was not received for this work and/or its publication.</funding-statement>
</funding-group>
<counts>
<fig-count count="0"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="0"/>
<page-count count="00"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Biomedical Robotics</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
<notes notes-type="frontiers-research-topic">
<p>Editorial on the Research Topic <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/research-topics/63016">Intelligent assistants for all</ext-link> </p>
</notes>
</front>
<body>
<p>Demographic shifts have resulted in a growing population at risk of being excluded from fundamental aspects of daily life. Among them, the elderly and those with disabilities constitute some of the most marginalized members of our society. In response, political and academic discussions are increasingly emphasizing the potential of emerging technologies, such as robotics and AI, to address these challenges. This highlights the importance of collaborative efforts in researching and developing innovative methodologies and prototypes aimed at augmenting technological support to mitigate existing barriers. In the workplace, robotic and technological aids are essential for assisting with task completion, while in caregiving, social robots contribute to maintaining cognitive engagement among the elderly. Across various contexts, interactions between humans, robots, machines, and AI demonstrate significant potential for promoting inclusivity, ultimately advancing the goal of a fair and accessible society for all.</p>
<p>Including individuals with disabilities, whether stemming from age, congenital conditions, or injury, plays an important role in overcoming demographic challenges. However, its success depends on robust and accessible assistive technologies driven by novel methods for human-robot, human-machine, and human-AI interaction. This Research Topic explores innovative approaches to caregiving and workplaces that involve collaboration between individuals with disabilities and (partially) automated systems. However, the tendency towards tailored assistance risks perpetuating the perceived gap between individuals with disabilities and those without, underscoring the need for technical solutions that benefit all equally. These inclusive measures are necessary to achieve genuine equality and highlight the necessity for seamless support in caregiving and daily life.</p>
<p>In the Research Topic, we bring together interdisciplinary contributions that advance our understanding of how robotics and digital health technologies can augment human capacities, support wellbeing, and enable richer human-machine collaboration across emotional, cognitive, and physical domains. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1569040">Salem and Sumi</ext-link> systematically induced embarrassment in human-robot interaction and showed that neutral and empathic robot responses, particularly when combined with an anime-style face, can meaningfully mitigate embarrassment by shaping emotional regulation and perceived social agency in interactions with a robot. These findings have important implications for designing emotionally intelligent robotic companions. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1545733">Olatunji et al.</ext-link> employ a participatory design approach with the Stretch mobile manipulator, identifying the functional capabilities desired by older adults with cognitive and mobility impairments. They articulate key tasks, facilitators, and barriers for everyday robot support in home environments. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fdgth.2025.1529072">Mitsugi et al.</ext-link> propose an AI-mediated framework rooted in experiential and organizational learning that transforms individual caregiver experiences into shared organizational knowledge to enhance engagement, reflection, and teamwork in elderly care settings. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1568402">Schneider et al.</ext-link> introduce a graph-based model of communication for cooperative human-machine trajectory planning. The model reveals how closed communication loops beyond the action layer can enable consensual, emancipated cooperation, with simulation evidence showing multiple viable pathways to reach shared motion references. 
Finally, <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1534346">Reitelsh&#xf6;fer et al.</ext-link> present a marketplace-based architecture for socially adaptable robots that allows dynamic selection and adjustment of interaction characters, demonstrating feasibility and adaptability through core components such as scene analysis, agent bidding, and feedback.</p>
<p>Collectively, these contributions articulate a coherent vision of intelligent assistants that serve a diverse range of users, spanning applications from private homes and care environments to professional organizations and shared environments. The works illustrate how intelligent assistants can be inclusive, context-sensitive, and scalable. By grounding technical innovation in human needs, lived experiences, and societal settings, this Research Topic underscores the potential of intelligent assistants to empower diverse users, support their autonomy and dignity, and meaningfully foster inclusion in daily life.</p>
</body>
<back>
<sec sec-type="author-contributions" id="s1">
<title>Author contributions</title>
<p>NM: Writing &#x2013; original draft, Writing &#x2013; review and editing. MK: Writing &#x2013; review and editing. JZ: Writing &#x2013; review and editing. AT: Writing &#x2013; review and editing.</p>
</sec>
<sec sec-type="COI-statement" id="s3">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
<p>The authors AT and JZ declared that they were editorial board members of Frontiers at the time of submission. This had no impact on the peer review process and the final decision.</p>
</sec>
<sec sec-type="ai-statement" id="s4">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was used in the creation of this manuscript. Generative AI was used while drafting the paragraph regarding the Research Topic&#x2019;s contents.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec sec-type="disclaimer" id="s5">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<fn-group>
<fn fn-type="custom" custom-type="edited-by">
<p>
<bold>Edited and Reviewed by</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/142793/overview">Elena De Momi</ext-link>, Polytechnic University of Milan, Italy</p>
</fn>
</fn-group>
</back>
</article>