<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article article-type="editorial" dtd-version="1.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Robot. AI</journal-id>
<journal-title-group>
<journal-title>Frontiers in Robotics and AI</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Robot. AI</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">2296-9144</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1785247</article-id>
<article-id pub-id-type="doi">10.3389/frobt.2026.1785247</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Editorial</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Editorial: Integrative approaches with BCI and robotics for improved human interaction</article-title>
<alt-title alt-title-type="left-running-head">Nazeer et al.</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2026.1785247">10.3389/frobt.2026.1785247</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Nazeer</surname>
<given-names>Hammad</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="https://loop.frontiersin.org/people/405135"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing - original draft</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Noori</surname>
<given-names>Farzan M.</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/332562"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Khan</surname>
<given-names>Rayyan Azam</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/405183"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
</contrib-group>
<aff id="aff1">
<label>1</label>
<institution>Department of Mechatronics Engineering, Air University</institution>, <city>Islamabad</city>, <country country="PK">Pakistan</country>
</aff>
<aff id="aff2">
<label>2</label>
<institution>Department of Informatics and RITMO, University of Oslo</institution>, <city>Oslo</city>, <country country="NO">Norway</country>
</aff>
<aff id="aff3">
<label>3</label>
<institution>Paul Albrechtsen Research Institute, CancerCare Manitoba</institution>, <city>Winnipeg</city>, <state>MB</state>, <country country="CA">Canada</country>
</aff>
<aff id="aff4">
<label>4</label>
<institution>Department of Electrical and Computer Engineering, University of Manitoba</institution>, <city>Winnipeg</city>, <state>MB</state>, <country country="CA">Canada</country>
</aff>
<author-notes>
<corresp id="c001">
<label>&#x2a;</label>Correspondence: Hammad Nazeer, <email xlink:href="mailto:hammad@au.edu.pk">hammad@au.edu.pk</email>
</corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-06">
<day>06</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2026</year>
</pub-date>
<volume>13</volume>
<elocation-id>1785247</elocation-id>
<history>
<date date-type="received">
<day>11</day>
<month>01</month>
<year>2026</year>
</date>
<date date-type="accepted">
<day>30</day>
<month>01</month>
<year>2026</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2026 Nazeer, Noori and Khan.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Nazeer, Noori and Khan</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-06">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<kwd-group>
<kwd>artificial intelligence</kwd>
<kwd>brain-computer interface BCI</kwd>
<kwd>deep learning</kwd>
<kwd>hybrid-BCI</kwd>
<kwd>machine learning</kwd>
<kwd>rehabilitation</kwd>
<kwd>robotics</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was not received for this work and/or its publication.</funding-statement>
</funding-group>
<counts>
<fig-count count="0"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="8"/>
<page-count count="00"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Computational Intelligence in Robotics</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
<notes notes-type="frontiers-research-topic">
<p>Editorial on the Research Topic <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/research-topics/63090">Integrative approaches with BCI and robotics for improved human interaction</ext-link> </p>
</notes>
</front>
<body>
<p>With the advancement of neurotechnology, human-machine interaction has been redefined, specifically with progress in brain-computer interface (BCI) systems (<xref ref-type="bibr" rid="B8">Zhang et al., 2023</xref>). By translating neural activity directly into machine-executable commands, BCIs are unlocking unprecedented possibilities in healthcare, assistive robotics, brain-controlled therapy and neurorehabilitation (<xref ref-type="bibr" rid="B4">Karikari and Koshechkin, 2023</xref>; <xref ref-type="bibr" rid="B6">Nazeer et al., 2025</xref>). Among BCI types, invasive BCIs have better signal quality and precision, while non-invasive BCIs are cheaper, more comfortable, and suitable for consumer and clinical use (<xref ref-type="bibr" rid="B5">Naseer and Hong, 2015</xref>). Although intensive research has been performed on non-invasive BCIs, there is still a need to cope with their lower resolution and noise, which can be improved with pre-processing and artificial intelligence (AI) for robotic applications (<xref ref-type="bibr" rid="B3">Hong et al., 2018</xref>; <xref ref-type="bibr" rid="B2">Hanafi et al., 2023</xref>).</p>
<p>This Research Topic explores the synergy between non-invasive neuroimaging modalities like EEG and fNIRS, and robotics to nurture more natural and intuitive collaborations between humans and artificial intelligence. It is an effort to collect current contributions on improved applications of BCIs in mental workload monitoring, bridging neural signals and natural language, explainable artificial intelligence, and robotics application for motor rehabilitation.</p>
<p>This research collection is the result of contributions from 12 authors, 14 reviewers and editors from 11 countries (Pakistan, Canada, Norway, Singapore, Germany, China, Romania, Italy, Australia, United Kingdom and United States) from Medical Institutions, Academic Institutions and Research Centers.</p>
<p>The Research Topic mainly contributed to open access datasets, decoding neural signals precisely, exploring the dominant features in deep learning models, enhancing the ability of BCIs to operate effectively in diverse real-world environments, and integrating explainable AI with neural signals to perform contrastive learning. Here is a brief summary of the main contributions, which demonstrate innovative approaches to solving current challenges and technical hurdles and to enhancing the diverse applications of BCIs.</p>
<p>Data acquisition is a vital component of efficient and robust BCI-based applications. This step results in robust datasets to develop BCIs for motor rehabilitation, such as controlling assistive and therapeutic robotic devices (<xref ref-type="bibr" rid="B7">Sun et al., 2025</xref>). <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1695169">Khan et al.</ext-link> contributed a structured and preprocessed functional near infrared spectroscopy (fNIRS) based open access dataset focused on lower limb motor imagery tasks involving the ankle and knee joints. This is an effort to accentuate assistive and rehabilitation robotics and advanced machine learning models to decode movement intent from fNIRS signals. This also provides a way forward for transparent understanding and reproducible studies.</p>
<p>One of the challenges and emerging areas of research in BCI-based neurotechnology is the task-specific performance of deep learning models with explainability. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1625731">Camaret Ndir et al.</ext-link> proposed EEG-CLIP, a contrastive learning framework that aligns EEG time-series data with corresponding clinical medical reports. This framework implemented few-shot and zero-shot decoding using textual prompts, allowing models to classify neural patterns without requiring task-specific training. This multimodal integration of neural data and text paves the way for more generalizable EEG representations, which supports the analysis of diverse EEG decoding tasks or the training of task-specific models with fewer samples.</p>
<p>In environments demanding rapid decisions under pressure with extreme precision, such as aviation, the cognitive state of the pilot is a critical factor in safety (<xref ref-type="bibr" rid="B1">Albuquerque et al., 2020</xref>). <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1441801">Haseeb et al.</ext-link> demonstrated a passive BCI (pBCI) system designed to monitor pilots&#x2019; mental workload during real flight conditions. The system employed multinomial logistic regression with a ridge estimator to achieve 84.6% mean accuracy in detecting workload levels using a dry-electrode EEG system. This work shows the potential of real-time BCI applications to mitigate human error in complex and dynamic scenarios.</p>
<p>In recent times, deep learning models have become more popular in EEG decoding, but understanding what these models learn and which features are more dominant in learning is important for trust and application diversity. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1625732">Schirrmeister and Ball</ext-link> proposed the novel EEG-InvNet and EEG-CosNet interpretability methods to explore features learned by the complete network. These methods allow researchers to visualize neural signals and identify expected and unexpected features, such as sub-delta frequency patterns, which may classify pathological and nonpathological EEG. The study is a step forward in embedding explainable AI in neurotechnology, with the potential of using visualization to understand the network prediction function without relying on specific predefined features.</p>
<p>These contributions explored and identified directions for future research in the integration of BCI, robotics and human interaction. The Research Topic demonstrates that human interaction with BCI may step forward with the integration of single- and multi-modal systems with advanced robotics and artificial intelligence. By addressing these challenges in processing pipelines, learning model interpretability, data accessibility, explainable AI and diverse applications, we may move closer to establishing state-of-the-art neurotechnology. This may help in improving the quality of life for users across various sectors and daily life by intuitively responding to our cognitive and physical needs.</p>
</body>
<back>
<sec sec-type="author-contributions" id="s1">
<title>Author contributions</title>
<p>HN: Conceptualization, Writing &#x2013; original draft. FN: Writing &#x2013; review and editing. RK: Writing &#x2013; review and editing.</p>
</sec>
<sec sec-type="COI-statement" id="s3">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="ai-statement" id="s4">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was not used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec sec-type="disclaimer" id="s5">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<fn-group>
<fn fn-type="custom" custom-type="edited-by">
<p>
<bold>Edited and Reviewed by</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1059653/overview">Michail-Antisthenis Tsompanas</ext-link>, University of the West of England, United Kingdom</p>
</fn>
</fn-group>
<ref-list>
<title>References</title>
<ref id="B1">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Albuquerque</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Tiwari</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Parent</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Cassani</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Gagnon</surname>
<given-names>J.-F.</given-names>
</name>
<name>
<surname>Lafond</surname>
<given-names>D.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>WAUC: a multi-modal database for mental workload assessment under physical activity</article-title>. <source>Front. Neurosci.</source> <volume>14</volume> (<issue>December</issue>), <fpage>549524</fpage>. <pub-id pub-id-type="doi">10.3389/fnins.2020.549524</pub-id>
<pub-id pub-id-type="pmid">33335465</pub-id>
</mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hanafi</surname>
<given-names>S. A.</given-names>
</name>
<name>
<surname>Bin Abdul Rahman</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Pertiwi</surname>
<given-names>D. A. A.</given-names>
</name>
<name>
<surname>Muslim</surname>
<given-names>M. A.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Brain computer interface (BCI) machine learning process: a review</article-title>. <source>J. Electron. Technol. Explor.</source> <volume>1</volume> (<issue>1</issue>), <fpage>29</fpage>&#x2013;<lpage>35</lpage>. <pub-id pub-id-type="doi">10.52465/joetex.v1i1.189</pub-id>
</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hong</surname>
<given-names>K. S.</given-names>
</name>
<name>
<surname>Khan</surname>
<given-names>M. J.</given-names>
</name>
<name>
<surname>Hong</surname>
<given-names>M. J.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Feature extraction and classification methods for hybrid FNIRS-EEG brain-computer interfaces</article-title>. <source>Front. Hum. Neurosci.</source> <volume>12</volume> (<issue>June</issue>), <fpage>1</fpage>&#x2013;<lpage>25</lpage>. <pub-id pub-id-type="doi">10.3389/fnhum.2018.00246</pub-id>
<pub-id pub-id-type="pmid">30002623</pub-id>
</mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Karikari</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Koshechkin</surname>
<given-names>K. A.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Review on brain-computer interface technologies in healthcare</article-title>. <source>Biophys. Rev.</source> <volume>15</volume> (<issue>5</issue>), <fpage>1351</fpage>&#x2013;<lpage>1358</lpage>. <pub-id pub-id-type="doi">10.1007/s12551-023-01138-6</pub-id>
<pub-id pub-id-type="pmid">37974976</pub-id>
</mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Naseer</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Hong</surname>
<given-names>K.-S. S.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>FNIRS-based brain-computer interfaces: a review</article-title>. <source>Front. Hum. Neurosci.</source> <volume>9</volume> (<issue>January</issue>), <fpage>1</fpage>&#x2013;<lpage>15</lpage>. <pub-id pub-id-type="doi">10.3389/fnhum.2015.00003</pub-id>
<pub-id pub-id-type="pmid">25674060</pub-id>
</mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Nazeer</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Naseer</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Khan</surname>
<given-names>M. J.</given-names>
</name>
<name>
<surname>Hong</surname>
<given-names>K.-S.</given-names>
</name>
</person-group> (<year>2025</year>). &#x201c;<article-title>Noninvasive brain&#x2013;computer interfaces using FNIRS, EEG, and hybrid EEG-FNIRS</article-title>,&#x201d; in <source>Brain-Computer Interfaces</source> (<publisher-name>Elsevier</publisher-name>), <fpage>297</fpage>&#x2013;<lpage>326</lpage>. <pub-id pub-id-type="doi">10.1016/B978-0-323-95439-6.00003-X</pub-id>
</mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Liang</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Gao</surname>
<given-names>S.</given-names>
</name>
<etal/>
</person-group> (<year>2025</year>). <article-title>Signal acquisition of brain&#x2013;computer interfaces: a medical-engineering crossover perspective review</article-title>. <source>Fundam. Res.</source> <volume>5</volume> (<issue>1</issue>), <fpage>3</fpage>&#x2013;<lpage>16</lpage>. <pub-id pub-id-type="doi">10.1016/j.fmre.2024.04.011</pub-id>
<pub-id pub-id-type="pmid">40166113</pub-id>
</mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Huang</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Huang</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Yu</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>Z.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Recent progress in wearable brain&#x2013;computer interface (BCI) devices based on electroencephalogram (EEG) for medical applications: a review</article-title>. <source>Health Data Sci.</source> <volume>3</volume> (<issue>January</issue>), <fpage>0096</fpage>. <pub-id pub-id-type="doi">10.34133/hds.0096</pub-id>
<pub-id pub-id-type="pmid">38487198</pub-id>
</mixed-citation>
</ref>
</ref-list>
</back>
</article>