<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article article-type="review-article" dtd-version="1.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Robot. AI</journal-id>
<journal-title-group>
<journal-title>Frontiers in Robotics and AI</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Robot. AI</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">2296-9144</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1755883</article-id>
<article-id pub-id-type="doi">10.3389/frobt.2026.1755883</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Mini Review</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Mini-review on human-centered assurance in robot-assisted orthopedics and neurosurgery</article-title>
<alt-title alt-title-type="left-running-head">Cho et al.</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2026.1755883">10.3389/frobt.2026.1755883</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Cho</surname>
<given-names>Sue Min</given-names>
</name>
<xref ref-type="aff" rid="aff1"/>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2923733"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing - original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Zou</surname>
<given-names>Xinrui</given-names>
</name>
<xref ref-type="aff" rid="aff1"/>
<uri xlink:href="https://loop.frontiersin.org/people/3395771"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing - original draft</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Fleig</surname>
<given-names>Laura</given-names>
</name>
<xref ref-type="aff" rid="aff1"/>
<uri xlink:href="https://loop.frontiersin.org/people/3325406"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing - original draft</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Unberath</surname>
<given-names>Mathias</given-names>
</name>
<xref ref-type="aff" rid="aff1"/>
<uri xlink:href="https://loop.frontiersin.org/people/1256088"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
</contrib-group>
<aff id="aff1">
<institution>Department of Computer Science, Johns Hopkins University</institution>, <city>Baltimore</city>, <state>MD</state>, <country country="US">United States</country>
</aff>
<author-notes>
<corresp id="c001">
<label>&#x2a;</label>Correspondence: Sue Min Cho, <email xlink:href="mailto:scho72@jhu.edu">scho72@jhu.edu</email>
</corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-23">
<day>23</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2026</year>
</pub-date>
<volume>13</volume>
<elocation-id>1755883</elocation-id>
<history>
<date date-type="received">
<day>27</day>
<month>11</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>03</day>
<month>02</month>
<year>2026</year>
</date>
<date date-type="accepted">
<day>10</day>
<month>02</month>
<year>2026</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2026 Cho, Zou, Fleig and Unberath.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Cho, Zou, Fleig and Unberath</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-23">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>As artificial intelligence (AI) drives the development of next-generation robotic platforms and navigation systems that operate with increasing levels of autonomy in orthopedic and neurosurgical procedures, the methods by which human operators verify and validate these systems&#x2019; operations become critically important. While significant effort has been spent on advancing technological capabilities and autonomy, comparatively little thought has been put into understanding how surgeons may effectively maintain oversight and assurance of these complex systems&#x2013;despite retaining full legal and ethical responsibility for surgical outcomes. This mini-review synthesizes assurance mechanisms following the Sense-Think-Act framework: spatial intelligence (navigation and registration), cognitive assistance (AI-driven planning and adaptation), and physical operation (robot motion and force interaction). We highlight human-centered assurance as an opportunity to enable safe adoption of increasingly autonomous surgical systems. Finally, we outline essential research directions for developing assurance frameworks that scale with increasing autonomy while maintaining human responsibility and control in orthopedic and neurosurgical procedures.</p>
</abstract>
<kwd-group>
<kwd>autonomous systems</kwd>
<kwd>human-robot interaction</kwd>
<kwd>medical robotics</kwd>
<kwd>safety validation</kwd>
<kwd>surgical automation</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. This work was supported in part by Johns Hopkins Internal Funds and by a Google Research Scholar Award. The funders were not involved in the study design, collection, analysis, interpretation of data, the writing of this article, or the decision to submit it for publication.</funding-statement>
</funding-group>
<counts>
<fig-count count="0"/>
<table-count count="1"/>
<equation-count count="0"/>
<ref-count count="69"/>
<page-count count="00"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Biomedical Robotics</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="s1">
<label>1</label>
<title>Introduction</title>
<p>The integration of robot-assisted surgical systems into orthopedic and neurosurgical procedures has revolutionized surgical practice, offering unprecedented precision and consistency for challenging minimally invasive approaches (<xref ref-type="bibr" rid="B48">Osman et al., 2025</xref>; <xref ref-type="bibr" rid="B7">Bunch et al., 2025</xref>; <xref ref-type="bibr" rid="B55">Ram et al., 2023</xref>; <xref ref-type="bibr" rid="B15">Doulgeris et al., 2015</xref>). These specialties, where millimeter errors can be catastrophic and actions like bone removal are irreversible, particularly benefit from robotic assistance. However, as these systems grow more sophisticated&#x2013;incorporating increases in autonomy, advanced navigation capabilities, and AI-driven decision support&#x2013;a critical challenge emerges: ensuring that human operators can effectively verify, validate, and maintain oversight of their integrated operation (<xref ref-type="bibr" rid="B69">Yang et al., 2017</xref>).</p>
<p>Despite substantial efforts devoted to advancing technological capabilities, the methods by which human operators assure safe and correct system operation remain comparatively underexplored. This gap is particularly concerning given that legal and ethical responsibility for surgical outcomes remains firmly with the humans, regardless of the level of technological assistance employed (<xref ref-type="bibr" rid="B18">Fosch-Villaronga et al., 2021</xref>; <xref ref-type="bibr" rid="B50">O&#x2019;Sullivan et al., 2019</xref>). While a robotic system may execute a bone cut or an AI algorithm may suggest an optimal trajectory, the surgeon must ultimately ensure these actions are appropriate, accurate, and safe for each individual patient.</p>
<p>Given the complex and variable nature of surgical interventions, including anatomical variation, unforeseen complications, and dynamic tissue interactions, surgical systems must be designed to preserve meaningful human oversight. This human-centered assurance, which we define as the methods and interfaces enabling surgeons to monitor, verify, and intervene in robot-assisted surgical systems, represents a critical opportunity to enable the safe adoption of increasingly autonomous surgical technologies. Lessons from autonomous vehicle development demonstrate that effective human-machine teaming requires deliberate design of trust calibration mechanisms, graduated autonomy frameworks, and transparent system communication (<xref ref-type="bibr" rid="B40">Lee and See, 2004</xref>; <xref ref-type="bibr" rid="B53">Parasuraman and Manzey, 2010</xref>; <xref ref-type="bibr" rid="B12">Committee, 2021</xref>). Similarly, regulatory bodies, including the FDA, have begun requiring transparency and human oversight provisions for AI-enabled medical devices (<xref ref-type="bibr" rid="B60">Shah et al., 2025</xref>), recognizing that meaningful human control is foundational to safe deployment.</p>
<p>This evolving landscape presents a significant opportunity: proactively developing human-centered assurance methods that scale alongside technological advancement. Rather than viewing oversight as a constraint on autonomy, effective assurance frameworks can enable the safe deployment of more capable systems by ensuring that human operators maintain calibrated trust and appropriate situational awareness (<xref ref-type="bibr" rid="B16">Endsley, 2017</xref>; <xref ref-type="bibr" rid="B28">Hoff and Bashir, 2015</xref>). The autonomous driving domain illustrates this principle through the SAE levels of driving automation, which explicitly define human roles and responsibilities at each autonomy level (<xref ref-type="bibr" rid="B12">Committee, 2021</xref>). A similar framework for surgical robotics could facilitate systematic development of assurance methods matched to system capabilities.</p>
<p>This opportunity manifests across three interconnected components that mirror the canonical robotics pipeline of Sense-Think-Act. First, spatial intelligence (Sense) integrates multiple imaging and tracking modalities to create the perceptual foundation that guides all subsequent operations. Effective assurance methods can help surgeons maintain the perceptual grounding needed to validate this spatial understanding. Second, cognitive assistance (Think) leverages AI to analyze surgical data and recommend optimal approaches. Transparent, explainable systems can enable surgeons to appropriately calibrate trust in these recommendations. Third, physical operation (Act) translates plans into robot movements and force interactions. Well-designed feedback mechanisms can ensure surgeons maintain meaningful oversight of autonomous execution. This Sense-Think-Act framework not only reflects the logical flow of autonomous systems but also provides a natural structure for developing corresponding assurance methods.</p>
<p>This mini-review provides a focused exploration of human-centered assurance methods in robotic-assisted orthopedic and neurosurgical procedures, organized according to the Sense-Think-Act framework: 1. assuring spatial intelligence (Sense); 2. assuring cognitive assistance (Think); and 3. assuring physical operation (Act). <xref ref-type="table" rid="T1">Table 1</xref> provides an overview of the assurance methods discussed in the following sections, organized according to this framework.</p>
<table-wrap id="T1" position="float">
<label>TABLE 1</label>
<caption>
<p>Current human-centered assurance methods in robot-assisted orthopedic and neurosurgery.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="left">Framework component</th>
<th align="left">Assurance method</th>
<th align="left">Key approaches</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td rowspan="4" align="left">Spatial intelligence (sense)</td>
<td align="left">Tracking technologies</td>
<td align="left">Optical tracking, electromagnetic tracking, marker-based registration</td>
</tr>
<tr>
<td align="left">Image-based navigation</td>
<td align="left">Fluoroscopy, CT, optical cameras, ultrasound-guided registration</td>
</tr>
<tr>
<td align="left">AR/MR visualization</td>
<td align="left">Reflective-AR displays, markerless AR alignment verification</td>
</tr>
<tr>
<td align="left">Uncertainty communication</td>
<td align="left">Registration confidence metrics, spatially-varying confidence visualization</td>
</tr>
<tr>
<td rowspan="3" align="left">Cognitive assistance (think)</td>
<td align="left">Explainable AI</td>
<td align="left">Saliency maps, interpretable model design, decision transparency</td>
</tr>
<tr>
<td align="left">Uncertainty quantification</td>
<td align="left">Prediction reliability indicators, confidence-aware recommendations</td>
</tr>
<tr>
<td align="left">Agentic systems</td>
<td align="left">Language-guided control, human-interpretable action representations</td>
</tr>
<tr>
<td rowspan="4" align="left">Physical operation (act)</td>
<td align="left">Shared control</td>
<td align="left">Virtual boundaries, motion constraints, tremor suppression</td>
</tr>
<tr>
<td align="left">Multimodal feedback</td>
<td align="left">Haptic feedback, visual-attention modeling, AR trajectory visualization</td>
</tr>
<tr>
<td align="left">Confidence-based control</td>
<td align="left">Dynamic autonomy adjustment, uncertainty-aware execution</td>
</tr>
<tr>
<td align="left">Predictive simulation</td>
<td align="left">Digital twins, pre-execution movement preview</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Through this analysis, we aim to synthesize current approaches, identify opportunities for advancement, and outline essential directions for future research. Our goal is to inform the development of assurance frameworks that empower humans to maintain meaningful oversight as orthopedic and neurosurgical technologies continue to advance, ultimately ensuring that these advanced systems enhance rather than compromise surgical safety and effectiveness.</p>
</sec>
<sec id="s2">
<label>2</label>
<title>Assuring spatial intelligence: navigation and registration quality</title>
<p>Spatial intelligence, the system&#x2019;s understanding of where surgical instruments are relative to patient anatomy, forms the perceptual foundation (Sense) upon which all subsequent cognitive planning and physical execution depend. Before a robotic system can recommend an optimal trajectory or execute a precise movement, it must first establish and maintain accurate spatial alignment between its coordinate system, surgical tools, and patient anatomy. Assuring this spatial understanding is therefore foundational: errors at the sensing stage propagate through and corrupt all downstream operations, regardless of how sophisticated the planning algorithms or how precise the mechanical execution (<xref ref-type="bibr" rid="B23">Gundle et al., 2017</xref>; <xref ref-type="bibr" rid="B38">Langlotz, 2004</xref>; <xref ref-type="bibr" rid="B26">Haidegger et al., 2009</xref>).</p>
<p>Current robotic systems rely on various tracking technologies (e.g., optical, electromagnetic) to establish and maintain spatial alignment (<xref ref-type="bibr" rid="B57">Saeedi-Hosseiny et al., 2023</xref>; <xref ref-type="bibr" rid="B3">Aguilera Saiz et al., 2024</xref>). While these conventional approaches offer the advantage of clear failure modes, they require invasive marker placement and external equipment (<xref ref-type="bibr" rid="B62">&#x160;uligoj et al., 2017</xref>; <xref ref-type="bibr" rid="B36">Kord et al., 2021</xref>).</p>
<p>Image-based navigation promises to overcome these limitations through markerless registration and reduced setup complexity by leveraging imaging equipment already present in most operating rooms. Fluoroscopy-based approaches are common in orthopedic and neurosurgical settings, and RGB-D cameras can also enable markerless registration (<xref ref-type="bibr" rid="B42">Liebmann et al., 2024</xref>). Machine learning methods have further begun to address registration challenges (<xref ref-type="bibr" rid="B65">Unberath et al., 2021</xref>). Yet even sophisticated systems commonly produce registration errors exceeding clinically acceptable thresholds.</p>
<p>While human surgeons recognize and compensate for subtle misalignments through experience and contextual awareness, current robotic systems typically execute plans without independent verification of registration validity, meaning registration errors directly translate into misplaced instruments or implants unless explicitly detected by the system or operator.</p>
<p>Early attempts at automated registration verification have shown limited success. <xref ref-type="bibr" rid="B66">Varnavas et al. (2015)</xref> developed automated 2D/3D registration verification but it is constrained by restrictive assumptions that fail to generalize and even within their specific application, the method failed in over 6% of cases, highlighting the inadequacy of purely algorithmic approaches for this safety-critical task. More recent learning-based registration systems, including neural rendering approaches for cross-modal 2D/3D alignment (<xref ref-type="bibr" rid="B17">Fehrentz et al., 2024</xref>), demonstrate improved registration performance under challenging visual conditions. Like many prior registration methods, they are primarily evaluated in terms of alignment accuracy, with less emphasis on how operators can assess or verify correctness during use.</p>
<p>Recognizing such limitations, researchers have shifted toward human-in-the-loop verification methods. For 3D-to-3D alignment in augmented reality (AR) and mixed reality (MR) scenarios, progress has been notable. <xref ref-type="bibr" rid="B19">Fotouhi et al. (2020)</xref> demonstrated that reflective-AR displays improve alignment precision by helping surgeons better align virtual and physical environments. Similarly, <xref ref-type="bibr" rid="B31">Kantak et al. (2024)</xref> showed that markerless AR systems could improve targeting accuracy of skull landmarks. These successes in 3D-to-3D scenarios highlight the potential of AR for spatial verification.</p>
<p>However, 2D-to-3D registration&#x2013;also critical for image-guided robotic navigation&#x2013;poses distinct challenges that remain largely unresolved. Our previous work (<xref ref-type="bibr" rid="B9">Cho et al., 2023</xref>) explored visualization paradigms for assessing 2D/3D registration in robotic spine surgery, finding that while users could differentiate error magnitudes better than chance, performance remained insufficient for reliable verification. Similar findings in pelvic registration (<xref ref-type="bibr" rid="B11">Cho et al., 2025b</xref>) confirm that visualization techniques alone cannot ensure robust verification.</p>
<p>Researchers have also explored communicating registration uncertainty to enable more nuanced assurance of spatial alignment quality. In prior work, we (<xref ref-type="bibr" rid="B10">Cho et al., 2025a</xref>) quantified the relationship between registration uncertainty metrics and actual accuracy, which can potentially provide operators with confidence indicators. <xref ref-type="bibr" rid="B22">Geshvadi et al. (2025)</xref> developed methods to visualize how registration confidence varies across the surgical field during virtual tumor resection. This approach enables surgeons to understand not just whether registration is accurate globally, but where it can be trusted.</p>
<p>Finally, recent work suggests that spatial intelligence in surgery extends well beyond estimating a single camera, tool, or anatomy alignment. Multimodal datasets such as MM-OR model the operating room as a dynamic scene of staff, tools, robots, and equipment using synchronized RGB-D, audio, robotic logs, and semantic scene graphs (<xref ref-type="bibr" rid="B49">&#xd6;zsoy et al., 2025</xref>). This perspective shows that spatial understanding is a multimodal, relational problem that requires reliable ways to assure spatial intelligence in complex surgical settings.</p>
</sec>
<sec id="s3">
<label>3</label>
<title>Assuring cognitive assistance: AI-driven planning and adaptation</title>
<p>Once spatial understanding is established, the cognitive assistance component (Think) analyzes this information alongside clinical data to generate surgical recommendations. Modern robotic surgical systems increasingly incorporate AI components for preoperative planning, intraoperative trajectory optimization, and real-time adaptation to surgical conditions. This cognitive layer transforms robots from precise positioning devices into intelligent surgical assistants capable of analyzing imaging data, suggesting optimal approaches, and adapting plans based on intraoperative findings. The opacity of these AI-driven decisions creates distinct assurance challenges, requiring methods that enable surgeons to interpret, validate, and appropriately trust algorithmic recommendations.</p>
<p>As such, the integration of AI introduces promising developments in orthopedic and neurosurgical robotics, but challenges persist, including data heterogeneity, algorithmic bias, and the &#x201c;black box&#x201d; nature of many models, alongside issues with robust validation (<xref ref-type="bibr" rid="B45">Misir and Yuce, 2025</xref>). These challenges are particularly acute in robotic surgery, where AI recommendations can directly translate into physical robotic actions with immediate patient impact, and the opacity of these AI-driven decisions creates unique assurance challenges that differ from mechanical or spatial verification.</p>
<p>Recognition of these limitations has sparked initial efforts toward AI transparency in surgical contexts. These efforts align with research in explainable artificial intelligence (XAI) for robotics, which seeks to make perception, planning, and control decisions interpretable to human operators, particularly in safety-critical settings (<xref ref-type="bibr" rid="B6">Anjomshoae et al., 2019</xref>). Within surgical contexts, <xref ref-type="bibr" rid="B27">Han et al. (2025)</xref> articulates the critical need for AI systems with built-in explainability to enable clinicians to interpret and challenge model decisions. <xref ref-type="bibr" rid="B63">Tafti et al. (2025)</xref> demonstrates how uncertainty quantification can highlight when predictions are unreliable, potentially prompting manual verification. However, these remain largely conceptual frameworks rather than implemented solutions in current robotic systems.</p>
<p>Some promising approaches are beginning to emerge from adjacent fields. In diagnostic imaging, saliency maps highlight which image regions influenced AI decisions, a technique that could be adapted to show why certain trajectories were selected. <xref ref-type="bibr" rid="B5">Amirian et al. (2023)</xref> identify opportunities for such explainable AI in orthopedics, though they acknowledge the tradeoff between interpretability and model performance.</p>
<p>In parallel, emerging work on agentic systems and language-guided control from the broader robotics community offers a complementary perspective on explainability by emphasizing structured and human-interpretable representations of action. Rather than relying solely on <italic>post hoc</italic> explanations, these approaches express planning and adaptation through explicit decision steps that can be examined, queried, or overridden by human operators. Prior work on explainable agents and robotic decision-making suggests that grounding actions in interpretable intermediate representations, including natural language, can support transparency and accountability in safety-critical settings (<xref ref-type="bibr" rid="B6">Anjomshoae et al., 2019</xref>; <xref ref-type="bibr" rid="B29">Huang et al., 2022</xref>). Recent work has demonstrated the feasibility of language-guided control in surgical settings, enabling natural language commands to control robotic X-ray systems (<xref ref-type="bibr" rid="B34">Killeen et al., 2024</xref>) and leveraging language-promptable digital twins for intelligent device control (<xref ref-type="bibr" rid="B35">Killeen et al., 2025</xref>). Beyond device control, language-based interfaces have also been explored for intraoperative surgical assistance, moving toward more natural human-machine collaboration during procedures (<xref ref-type="bibr" rid="B59">Seenivasan et al., 2025</xref>). Although fully agentic paradigms remain largely unexplored in current orthopedic and neurosurgical robotic systems, these developments highlight a promising direction for aligning AI-driven planning with the assurance requirements of intraoperative workflows.</p>
<p>The regulatory landscape is beginning to drive progress. <xref ref-type="bibr" rid="B52">Panesar et al. (2020)</xref> note that some regulators now mandate algorithmic transparency before approving AI systems for patient care, creating pressure for more interpretable systems. <xref ref-type="bibr" rid="B61">Shahid et al. (2025)</xref> found that lack of transparency remains the primary barrier to AI adoption in spinal and cranial surgery, suggesting that even basic explainability features could significantly impact clinical acceptance and safe integration with robotic systems.</p>
<p>Despite these developments, current robotic surgical systems with AI components largely maintain clear separation between AI recommendations and robotic execution. The surgeon must actively transfer AI suggestions into robot commands, preserving human oversight but limiting seamless integration. Future systems will need to develop real-time explainability suited to surgical workflows, confidence-aware execution that automatically adjusts robot autonomy based on AI certainty, and clear frameworks for which decisions require human validation versus autonomous execution.</p>
</sec>
<sec id="s4">
<label>4</label>
<title>Assuring physical operation: movement and mechanical safety</title>
<p>Building upon spatial understanding (Sense) and cognitive plans (Think), the physical operation component (Act) represents where robotic systems directly interact with patient anatomy. While errors in navigation or AI systems manifest initially as incorrect information that may propagate to cause harm, errors during physical execution result in immediate patient impact at the point of contact. This distinction introduces the unique assurance challenges of the execution phase: even when spatial registration is accurate and AI recommendations are appropriate, the physical robot must faithfully translate these inputs into safe movements and force interactions. Errors in robotic execution can cause immediate physical harm through unintended movements, excessive forces, or spatial inaccuracies (<xref ref-type="bibr" rid="B4">Alemzadeh et al., 2016</xref>; <xref ref-type="bibr" rid="B2">Agcaoglu et al., 2012</xref>; <xref ref-type="bibr" rid="B56">Rivero-Moreno et al., 2023</xref>; <xref ref-type="bibr" rid="B51">Pagani et al., 2022</xref>). In neurosurgical and orthopedic procedures, where surgeons work near critical structures and rely on precise bone preparation, even millimeter-scale position errors or modest force overshoots can result in nerve damage, vascular injury, or compromised implant fixation (<xref ref-type="bibr" rid="B24">Gurses et al., 2024</xref>; <xref ref-type="bibr" rid="B46">Moccia et al., 2018</xref>; <xref ref-type="bibr" rid="B32">Karas and Chiocca, 2007</xref>; <xref ref-type="bibr" rid="B41">Lee et al., 2024</xref>; <xref ref-type="bibr" rid="B47">Mulyadi et al., 2024</xref>).</p>
<p>Most current robotic surgical systems keep the surgeon in the loop, with the robot providing assistance such as tremor suppression, precise positioning, or motion constraints (<xref ref-type="bibr" rid="B56">Rivero-Moreno et al., 2023</xref>; <xref ref-type="bibr" rid="B1">Abdelaal et al., 2020</xref>; <xref ref-type="bibr" rid="B64">Tomasz et al., 2021</xref>). Grounded shared control can constrain tools away from anatomically critical regions, provided accurate co-registration between the robot frame and the patient anatomy (<xref ref-type="bibr" rid="B54">Payne et al., 2020</xref>). These preprogrammed constraints create virtual boundaries that prevent both the surgeon and the robot from entering dangerous zones.</p>
<p>However, as autonomy increases, robotic systems will independently execute portions of the procedure with the surgeon supervising and intervening when needed (<xref ref-type="bibr" rid="B43">Liu et al., 2024</xref>). This fundamental shift towards supervisory oversight requires new capabilities: surgeons must maintain situational awareness, be able to rapidly assess system confidence and performance, and access intervention mechanisms that are both responsive and minimally disruptive to workflow.</p>
<p>Similar challenges have already emerged in other human-robot interaction domains, including collaborative industrial robots, mobile service robots, and semi-autonomous vehicles. In these settings, humans no longer continuously control motion but instead supervise automated behaviors, intervening only when necessary (<xref ref-type="bibr" rid="B16">Endsley, 2017</xref>). To support this mode of interaction, these fields have developed methods for intent prediction, shared control, confidence-aware autonomy, and transparent system feedback. For example, cobots use adaptive impedance control, safety envelopes, and real-time human motion prediction to safely operate in close proximity to workers (<xref ref-type="bibr" rid="B25">Haddadin et al., 2017</xref>; <xref ref-type="bibr" rid="B44">Mainprice and Berenson, 2013</xref>; <xref ref-type="bibr" rid="B39">Lasota et al., 2017</xref>), while autonomous driving systems rely on uncertainty-aware planning, driver monitoring, and graded autonomy handoff mechanisms to maintain human readiness (<xref ref-type="bibr" rid="B14">Doshi and Trivedi, 2011</xref>; <xref ref-type="bibr" rid="B20">Fridman, 2018</xref>). Across these domains, effective assurance depends on both low-level safety constraints and higher-level mechanisms that communicate system intent, limitations, and confidence to human supervisors. These strategies suggest that surgical robotics can draw from a broader body of work on supervisory control, human trust calibration, and situation-aware autonomy when designing assurance mechanisms for physical operation.</p>
<p>Recognizing this gap, several emerging technologies show promise for potential physical operation assurance in robotic surgery. Digital twin technology (<xref ref-type="bibr" rid="B13">Ding et al., 2024</xref>; <xref ref-type="bibr" rid="B37">Kyeremeh et al., 2025</xref>) and simulation approaches (<xref ref-type="bibr" rid="B33">Killeen et al., 2023</xref>) can create high-fidelity virtual replicas of robotic systems and patient anatomy, potentially enabling surgeons to preview robotic actions before execution and maintain parallel virtual monitoring during procedures. This technology could provide predictive assurance by simulating planned movements and their potential consequences before physical execution.</p>
<p>AR and MR technologies represent another promising avenue, demonstrating how visualization can enhance oversight capabilities. <xref ref-type="bibr" rid="B67">V&#xf6;r&#xf6;s et al. (2022)</xref> developed an AR-based interaction scheme for robotic pedicle screw placement that not only visualizes planned trajectories but also enables intraoperative plan adjustment and direct robot control. This bidirectional interaction represents a significant advance in maintaining human oversight during robotic procedures. The potential of combining multiple sensory modalities for assurance is also shown in recent work by <xref ref-type="bibr" rid="B8">Chen et al. (2024)</xref>, who demonstrated that integrating AR visualization with haptic feedback and real-time visual-attention modeling creates a safe and ergonomic shared-control framework for robot-assisted pedicle screw drilling. Their system outperformed both full human and full robot control, suggesting that effective assurance may require multimodal feedback that aligns with surgeons&#x2019; natural perception-action coupling.</p>
<p>Beyond visualization and haptic feedback, communicating robot uncertainty and confidence levels represents a critical yet underdeveloped aspect of physical operation assurance. While not specific to orthopedic or neurosurgical applications, work in soft tissue surgery provides valuable insights that could be adapted. <xref ref-type="bibr" rid="B58">Saeidi et al. (2018)</xref> developed a confidence-based shared control strategy for the Smart Tissue Autonomous Robot (STAR) that reduced operator work time compared to pure manual control while maintaining safety through dynamic automation adjustment based on system confidence. Similarly, <xref ref-type="bibr" rid="B30">Kam et al. (2021)</xref> demonstrated confidence-based supervised-autonomous control for robotic vaginal cuff closure, showing how robots can communicate their certainty levels to guide appropriate human oversight.</p>
<p>In neurosurgical contexts, uncertainty quantification takes on additional complexity due to tissue deformation and the critical nature of anatomical structures. <xref ref-type="bibr" rid="B21">Frisken et al. (2021)</xref> advanced this field by moving beyond simple safety margins to incorporate multiple uncertainty sources. Their approach combines segmentation uncertainty and predicted brain shift into unified risk volumes, demonstrating how comprehensive uncertainty quantification can inform safer robotic operation. This multi-factorial approach to uncertainty could be extended to real-time applications, providing surgeons with dynamic confidence information throughout procedures. Extending this assurance to the physical workspace, <xref ref-type="bibr" rid="B68">Xian et al. (2025)</xref> employed CLF-CBF constraints to guarantee the safety of the supervising surgeon during close-range human-robot interaction.</p>
</sec>
<sec sec-type="discussion" id="s5">
<label>5</label>
<title>Discussion</title>
<p>Our analysis, organized around the Sense-Think-Act framework, reveals significant opportunities to advance human-centered assurance in robot-assisted orthopedic and neurosurgical procedures. While current efforts remain fragmented, promising foundations have been established across all three components of robotic systems. For physical operation, shared control paradigms and multimodal feedback demonstrate potential for maintaining effective human oversight. For spatial intelligence, innovative visualization methods are beginning to help surgeons detect registration errors, though reliability improvements are needed. For AI assistance, growing recognition of explainability needs is driving initial development efforts, even as implementation remains in early stages.</p>
<p>As robotic systems become more sophisticated and gain autonomy, the need for effective human oversight becomes paradoxically more critical yet more challenging. This evolution presents opportunities for transformative advances in several key areas. First, integrated assurance architectures can move beyond separate solutions for physical, spatial, and cognitive components toward unified frameworks that propagate uncertainty and confidence information across all system aspects. When registration quality degrades, for instance, this information should automatically influence both physical execution parameters and the confidence in AI decisions. Second, predictive methods can enable operators to anticipate potential failures before they occur, shifting from reactive error detection to proactive risk management. Third, context-aware adaptation can address how assurance requirements vary dramatically by anatomical location and procedure phase, with systems dynamically adjusting oversight sensitivity based on clinical context.</p>
<p>The transition from human-operated to human-supervised robotic surgery represents a fundamental shift that current assurance methods are approaching but have not yet fully addressed. As autonomy increases, the surgeon&#x2019;s role evolves from direct controller to system supervisor&#x2014;a transformation requiring new interfaces, training paradigms, and conceptual frameworks for human-robot collaboration. Critical to this evolution is validated human factors design through extensive user studies. As emphasized in our prior work (<xref ref-type="bibr" rid="B11">Cho et al., 2025b</xref>), ecological validity requires testing under realistic surgical conditions, as laboratory findings may not translate to the operating room, where the perceived stakes modulate participant perception and behavior. Future systems must therefore be developed and validated in environments that capture the full complexity of surgical practice, ensuring that assurance methods work not just in principle but in the demanding reality of orthopedic and neurosurgical procedures.</p>
</sec>
<sec sec-type="conclusion" id="s6">
<label>6</label>
<title>Conclusion</title>
<p>The future of robot-assisted orthopedic and neurosurgery depends on both advancing technical capabilities and ensuring that these systems remain transparent, predictable, and meaningfully controllable to human operators. As surgical technologies continue to evolve in complexity and capability, parallel innovation in human-centered assurance methods is foundational to enable effective human-machine collaboration in safety-critical environments. Realizing this goal requires a shift from the current paradigm of humans adapting to technology to one in which technology is deliberately designed to augment human roles and responsibilities. Only through such proactive, human-centered approaches can emerging surgical technologies fulfill their promise of excellent surgical care while maintaining safety, accountability, and trust.</p>
</sec>
</body>
<back>
<sec sec-type="author-contributions" id="s7">
<title>Author contributions</title>
<p>SC: Conceptualization, Investigation, Supervision, Writing &#x2013; original draft, Writing &#x2013; review and editing. XZ: Investigation, Writing &#x2013; original draft. LF: Investigation, Writing &#x2013; original draft. MU: Conceptualization, Supervision, Writing &#x2013; review and editing.</p>
</sec>
<sec sec-type="COI-statement" id="s9">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="ai-statement" id="s10">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was used in the creation of this manuscript. Claude Opus 4 was used solely for language editing. All content was originally written by the authors and subsequently reviewed and edited after AI assistance.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec sec-type="disclaimer" id="s11">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Abdelaal</surname>
<given-names>A. E.</given-names>
</name>
<name>
<surname>Mathur</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Salcudean</surname>
<given-names>S. E.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Robotics <italic>in vivo:</italic> a perspective on human&#x2013;robot interaction in surgical robotics</article-title>. <source>Annu. Review Control, Robotics, Autonomous Systems</source> <volume>3</volume>, <fpage>221</fpage>&#x2013;<lpage>242</lpage>. <pub-id pub-id-type="doi">10.1146/annurev-control-091219-013437</pub-id>
</mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Agcaoglu</surname>
<given-names>O.</given-names>
</name>
<name>
<surname>Aliyev</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Taskin</surname>
<given-names>H. E.</given-names>
</name>
<name>
<surname>Chalikonda</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Walsh</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Costedio</surname>
<given-names>M. M.</given-names>
</name>
<etal/>
</person-group> (<year>2012</year>). <article-title>Malfunction and failure of robotic systems during general surgical procedures</article-title>. <source>Surg. Endoscopy</source> <volume>26</volume>, <fpage>3580</fpage>&#x2013;<lpage>3583</lpage>. <pub-id pub-id-type="doi">10.1007/s00464-012-2370-9</pub-id>
<pub-id pub-id-type="pmid">22678175</pub-id>
</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Aguilera Saiz</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Groen</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Heerink</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Ruers</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>The influence of the da Vinci surgical robot on electromagnetic tracking in a clinical environment</article-title>. <source>J. Robotic Surgery</source> <volume>18</volume>, <fpage>54</fpage>. <pub-id pub-id-type="doi">10.1007/s11701-023-01812-7</pub-id>
<pub-id pub-id-type="pmid">38280064</pub-id>
</mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Alemzadeh</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Raman</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Leveson</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Kalbarczyk</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Iyer</surname>
<given-names>R. K.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Adverse events in robotic surgery: a retrospective study of 14 years of FDA data</article-title>. <source>PloS One</source> <volume>11</volume>, <fpage>e0151470</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0151470</pub-id>
<pub-id pub-id-type="pmid">27097160</pub-id>
</mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Amirian</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Carlson</surname>
<given-names>L. A.</given-names>
</name>
<name>
<surname>Gong</surname>
<given-names>M. F.</given-names>
</name>
<name>
<surname>Lohse</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Weiss</surname>
<given-names>K. R.</given-names>
</name>
<name>
<surname>Plate</surname>
<given-names>J. F.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). &#x201c;<article-title>Explainable AI in orthopedics: challenges, opportunities, and prospects</article-title>,&#x201d; in <source>2023 congress in computer science, computer engineering, and applied computing (CSCE)</source> (<publisher-name>IEEE</publisher-name>), <fpage>1374</fpage>&#x2013;<lpage>1380</lpage>.</mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Anjomshoae</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Najjar</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Calvaresi</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Fr&#xe4;mling</surname>
<given-names>K.</given-names>
</name>
</person-group> (<year>2019</year>). &#x201c;<article-title>Explainable agents and robots: results from a systematic literature review</article-title>,&#x201d; in <conf-name>18th International Conference on Autonomous Agents and Multiagent Systems (AAMAS 2019)</conf-name>, <conf-loc>Montreal, Canada</conf-loc>, <conf-date>May 13&#x2013;17, 2019</conf-date> (<publisher-name>International Foundation for Autonomous Agents and Multiagent Systems</publisher-name>), <fpage>1078</fpage>&#x2013;<lpage>1088</lpage>.</mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bunch</surname>
<given-names>K. M.</given-names>
</name>
<name>
<surname>Greeneway</surname>
<given-names>G. P.</given-names>
</name>
<name>
<surname>Ansari</surname>
<given-names>D. S.</given-names>
</name>
<name>
<surname>Patel</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Nottmeier</surname>
<given-names>E. W.</given-names>
</name>
<name>
<surname>Madhavan</surname>
<given-names>K. H.</given-names>
</name>
<etal/>
</person-group> (<year>2025</year>). <article-title>The symbiosis of robotics, enabling technology and minimally invasive surgery</article-title>. <source>North Am. Spine Soc. J. (NASSJ)</source> <volume>23</volume>, <fpage>100769</fpage>. <pub-id pub-id-type="doi">10.1016/j.xnsj.2025.100769</pub-id>
<pub-id pub-id-type="pmid">40837070</pub-id>
</mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Chen</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Zou</surname>
<given-names>Q.</given-names>
</name>
<name>
<surname>Song</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Yu</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Zhu</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Song</surname>
<given-names>S.</given-names>
</name>
<etal/>
</person-group> (<year>2024</year>). &#x201c;<article-title>Visual attention based cognitive human&#x2013;robot collaboration for pedicle screw placement in robot-assisted orthopedic surgery</article-title>,&#x201d; in <source>2024 IEEE/RSJ international conference on intelligent robots and systems (IROS)</source> (<publisher-name>IEEE</publisher-name>), <fpage>7078</fpage>&#x2013;<lpage>7084</lpage>.</mixed-citation>
</ref>
<ref id="B9">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cho</surname>
<given-names>S. M.</given-names>
</name>
<name>
<surname>Grupp</surname>
<given-names>R. B.</given-names>
</name>
<name>
<surname>Gomez</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Gupta</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Armand</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Osgood</surname>
<given-names>G.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>Visualization in 2d/3d registration matters for assuring technology-assisted image-guided surgery</article-title>. <source>Int. Journal Computer Assisted Radiology Surgery</source> <volume>18</volume>, <fpage>1017</fpage>&#x2013;<lpage>1024</lpage>. <pub-id pub-id-type="doi">10.1007/s11548-023-02888-0</pub-id>
<pub-id pub-id-type="pmid">37079247</pub-id>
</mixed-citation>
</ref>
<ref id="B10">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cho</surname>
<given-names>S. M.</given-names>
</name>
<name>
<surname>Do</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Grupp</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Armand</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Taylor</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Unberath</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2025a</year>). <article-title>Uncertainty quantification in image-based 2d/3d registration and its relationship with accuracy: sm cho et al</article-title>. <source>Int. J. Comput. Assisted Radiology Surg.</source>, <fpage>1</fpage>&#x2013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1007/s11548-025-03417-x</pub-id>
</mixed-citation>
</ref>
<ref id="B11">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Cho</surname>
<given-names>S. M.</given-names>
</name>
<name>
<surname>Wu</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Kilmer</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Taylor</surname>
<given-names>R. H.</given-names>
</name>
<name>
<surname>Unberath</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2025b</year>). &#x201c;<article-title>Feeling the stakes: realism and ecological validity in user research for computer-assisted interventions</article-title>,&#x201d; in <source>International conference on medical image computing and computer-assisted intervention</source> (<publisher-name>Springer</publisher-name>), <fpage>189</fpage>&#x2013;<lpage>197</lpage>.</mixed-citation>
</ref>
<ref id="B12">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Committee</surname>
<given-names>O.-R. A. D. O.</given-names>
</name>
</person-group> (<year>2021</year>). <source>Taxonomy and definitions for terms related to driving automation systems for on-road motor vehicles</source>. <publisher-name>SAE International</publisher-name>.</mixed-citation>
</ref>
<ref id="B13">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ding</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Seenivasan</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Killeen</surname>
<given-names>B. D.</given-names>
</name>
<name>
<surname>Cho</surname>
<given-names>S. M.</given-names>
</name>
<name>
<surname>Unberath</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Digital twins as a unifying framework for surgical data science: the enabling role of geometric scene understanding</article-title>. <source>Artif. Intell. Surg.</source> <volume>4</volume>, <fpage>109</fpage>&#x2013;<lpage>138</lpage>. <pub-id pub-id-type="doi">10.20517/ais.2024.16</pub-id>
</mixed-citation>
</ref>
<ref id="B14">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Doshi</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Trivedi</surname>
<given-names>M. M.</given-names>
</name>
</person-group> (<year>2011</year>). &#x201c;<article-title>Tactical driver behavior prediction and intent inference: a review</article-title>,&#x201d; in <source>2011 14th international IEEE conference on intelligent transportation systems (ITSC)</source> (<publisher-name>IEEE</publisher-name>), <fpage>1892</fpage>&#x2013;<lpage>1897</lpage>.</mixed-citation>
</ref>
<ref id="B15">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Doulgeris</surname>
<given-names>J. J.</given-names>
</name>
<name>
<surname>Gonzalez-Blohm</surname>
<given-names>S. A.</given-names>
</name>
<name>
<surname>Filis</surname>
<given-names>A. K.</given-names>
</name>
<name>
<surname>Shea</surname>
<given-names>T. M.</given-names>
</name>
<name>
<surname>Aghayev</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Vrionis</surname>
<given-names>F. D.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Robotics in neurosurgery: evolution, current challenges, and compromises</article-title>. <source>Cancer Control.</source> <volume>22</volume>, <fpage>352</fpage>&#x2013;<lpage>359</lpage>. <pub-id pub-id-type="doi">10.1177/107327481502200314</pub-id>
<pub-id pub-id-type="pmid">26351892</pub-id>
</mixed-citation>
</ref>
<ref id="B16">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Endsley</surname>
<given-names>M. R.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>From here to autonomy: lessons learned from human&#x2013;automation research</article-title>. <source>Hum. Factors</source> <volume>59</volume>, <fpage>5</fpage>&#x2013;<lpage>27</lpage>. <pub-id pub-id-type="doi">10.1177/0018720816681350</pub-id>
<pub-id pub-id-type="pmid">28146676</pub-id>
</mixed-citation>
</ref>
<ref id="B17">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Fehrentz</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Azampour</surname>
<given-names>M. F.</given-names>
</name>
<name>
<surname>Dorent</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Rasheed</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Galvin</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Golby</surname>
<given-names>A.</given-names>
</name>
<etal/>
</person-group> (<year>2024</year>). &#x201c;<article-title>Intraoperative registration by cross-modal inverse neural rendering</article-title>,&#x201d; in <source>International conference on medical image computing and computer-assisted intervention</source> (<publisher-name>Springer</publisher-name>), <fpage>317</fpage>&#x2013;<lpage>327</lpage>.</mixed-citation>
</ref>
<ref id="B18">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fosch-Villaronga</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Khanna</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Drukarch</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Custers</surname>
<given-names>B. H.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>A human in the loop in surgery automation</article-title>. <source>Nat. Mach. Intell.</source> <volume>3</volume>, <fpage>368</fpage>&#x2013;<lpage>369</lpage>. <pub-id pub-id-type="doi">10.1038/s42256-021-00349-4</pub-id>
</mixed-citation>
</ref>
<ref id="B19">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fotouhi</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Song</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Mehrfard</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Taylor</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>Q.</given-names>
</name>
<name>
<surname>Xian</surname>
<given-names>F.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Reflective-ar display: an interaction methodology for virtual-to-real alignment in medical robotics</article-title>. <source>IEEE Robotics Automation Lett.</source> <volume>5</volume>, <fpage>2722</fpage>&#x2013;<lpage>2729</lpage>. <pub-id pub-id-type="doi">10.1109/lra.2020.2972831</pub-id>
</mixed-citation>
</ref>
<ref id="B20">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fridman</surname>
<given-names>L.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Human-centered autonomous vehicle systems: principles of effective shared autonomy</article-title>. <comment>arXiv preprint arXiv:1810.01835</comment>.</mixed-citation>
</ref>
<ref id="B21">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Frisken</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Luo</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Haouchine</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Pieper</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Wells</surname>
<given-names>W. M.</given-names>
</name>
<etal/>
</person-group> (<year>2021</year>). <article-title>Incorporating uncertainty into path planning for minimally invasive robotic neurosurgery</article-title>. <source>IEEE Trans. Med. Robotics Bionics</source> <volume>4</volume>, <fpage>5</fpage>&#x2013;<lpage>16</lpage>. <pub-id pub-id-type="doi">10.1109/tmrb.2021.3122357</pub-id>
</mixed-citation>
</ref>
<ref id="B22">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Geshvadi</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Dorent</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Galvin</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Rigolo</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Haouchine</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Kapur</surname>
<given-names>T.</given-names>
</name>
<etal/>
</person-group> (<year>2025</year>). <article-title>Optimizing registration uncertainty visualization to support intraoperative decision-making during brain tumor resection</article-title>. <source>Int. J. Comput. Assisted Radiology Surg.</source> <volume>20</volume>, <fpage>1</fpage>&#x2013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1007/s11548-025-03407-z</pub-id>
<pub-id pub-id-type="pmid">40360961</pub-id>
</mixed-citation>
</ref>
<ref id="B23">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gundle</surname>
<given-names>K. R.</given-names>
</name>
<name>
<surname>White</surname>
<given-names>J. K.</given-names>
</name>
<name>
<surname>Conrad</surname>
<given-names>E. U.</given-names>
</name>
<name>
<surname>Ching</surname>
<given-names>R. P.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Accuracy and precision of a surgical navigation system: effect of camera and patient tracker position and number of active markers</article-title>. <source>Open Orthopaedics Journal</source> <volume>11</volume>, <fpage>493</fpage>&#x2013;<lpage>501</lpage>. <pub-id pub-id-type="doi">10.2174/1874325001711010493</pub-id>
<pub-id pub-id-type="pmid">28694888</pub-id>
</mixed-citation>
</ref>
<ref id="B24">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gurses</surname>
<given-names>M. E.</given-names>
</name>
<name>
<surname>Khalafallah</surname>
<given-names>A. M.</given-names>
</name>
<name>
<surname>Gecici</surname>
<given-names>N. N.</given-names>
</name>
<name>
<surname>G&#xf6;kalp</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Shah</surname>
<given-names>K. H.</given-names>
</name>
<name>
<surname>DeLong</surname>
<given-names>C. A.</given-names>
</name>
<etal/>
</person-group> (<year>2024</year>). <article-title>The safety, accuracy, and feasibility of robotic assistance in neuro-oncological surgery</article-title>. <source>Neurosurg. Focus</source> <volume>57</volume>, <fpage>E3</fpage>. <pub-id pub-id-type="doi">10.3171/2024.9.FOCUS24290</pub-id>
<pub-id pub-id-type="pmid">39616636</pub-id>
</mixed-citation>
</ref>
<ref id="B25">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Haddadin</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>De Luca</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Albu-Sch&#xe4;ffer</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Robot collisions: a survey on detection, isolation, and identification</article-title>. <source>IEEE Trans. Robotics</source> <volume>33</volume>, <fpage>1292</fpage>&#x2013;<lpage>1312</lpage>. <pub-id pub-id-type="doi">10.1109/tro.2017.2723903</pub-id>
</mixed-citation>
</ref>
<ref id="B26">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Haidegger</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Kovacs</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Benyo</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Benyo</surname>
<given-names>Z.</given-names>
</name>
</person-group> (<year>2009</year>). &#x201c;<article-title>Spatial accuracy of surgical robots</article-title>,&#x201d; in <source>2009 5th international symposium on applied computational intelligence and informatics (IEEE)</source>, <fpage>133</fpage>&#x2013;<lpage>138</lpage>.</mixed-citation>
</ref>
<ref id="B27">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Han</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Huang</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>Y.-F.</given-names>
</name>
<name>
<surname>Lu</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>S.</given-names>
</name>
<etal/>
</person-group> (<year>2025</year>). <article-title>Artificial intelligence in orthopedic surgery: current applications, challenges, and future directions</article-title>. <source>MedComm</source> <volume>6</volume>, <fpage>e70260</fpage>. <pub-id pub-id-type="doi">10.1002/mco2.70260</pub-id>
<pub-id pub-id-type="pmid">40567249</pub-id>
</mixed-citation>
</ref>
<ref id="B28">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hoff</surname>
<given-names>K. A.</given-names>
</name>
<name>
<surname>Bashir</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Trust in automation: integrating empirical evidence on factors that influence trust</article-title>. <source>Hum. Factors</source> <volume>57</volume>, <fpage>407</fpage>&#x2013;<lpage>434</lpage>. <pub-id pub-id-type="doi">10.1177/0018720814547570</pub-id>
<pub-id pub-id-type="pmid">25875432</pub-id>
</mixed-citation>
</ref>
<ref id="B29">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Huang</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Xia</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Xiao</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Chan</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Liang</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Florence</surname>
<given-names>P.</given-names>
</name>
<etal/>
</person-group> (<year>2022</year>). <article-title>Inner monologue: embodied reasoning through planning with language models</article-title>. <source>arXiv Preprint arXiv:2207.05608</source>. <pub-id pub-id-type="doi">10.48550/arXiv.2207.05608</pub-id>
</mixed-citation>
</ref>
<ref id="B30">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Kam</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Saeidi</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Hsieh</surname>
<given-names>M. H.</given-names>
</name>
<name>
<surname>Kang</surname>
<given-names>J. U.</given-names>
</name>
<name>
<surname>Krieger</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2021</year>). &#x201c;<article-title>A confidence-based supervised-autonomous control strategy for robotic vaginal cuff closure</article-title>,&#x201d; in <source>2021 IEEE international conference on robotics and automation (ICRA)</source> (<publisher-name>IEEE</publisher-name>), <fpage>12261</fpage>&#x2013;<lpage>12267</lpage>.</mixed-citation>
</ref>
<ref id="B31">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kantak</surname>
<given-names>P. A.</given-names>
</name>
<name>
<surname>Bartlett</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Chaker</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Harmon</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Mansour</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Pawloski</surname>
<given-names>J.</given-names>
</name>
<etal/>
</person-group> (<year>2024</year>). <article-title>Augmented reality registration system for visualization of skull landmarks</article-title>. <source>World Neurosurg.</source> <volume>182</volume>, <fpage>e369</fpage>&#x2013;<lpage>e376</lpage>. <pub-id pub-id-type="doi">10.1016/j.wneu.2023.11.110</pub-id>
<pub-id pub-id-type="pmid">38013107</pub-id>
</mixed-citation>
</ref>
<ref id="B32">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Karas</surname>
<given-names>C. S.</given-names>
</name>
<name>
<surname>Chiocca</surname>
<given-names>E. A.</given-names>
</name>
</person-group> (<year>2007</year>). <article-title>Neurosurgical robotics: a review of brain and spine applications</article-title>. <source>J. Robotic Surgery</source> <volume>1</volume>, <fpage>39</fpage>&#x2013;<lpage>43</lpage>. <pub-id pub-id-type="doi">10.1007/s11701-006-0006-6</pub-id>
<pub-id pub-id-type="pmid">25484937</pub-id>
</mixed-citation>
</ref>
<ref id="B33">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Killeen</surname>
<given-names>B. D.</given-names>
</name>
<name>
<surname>Cho</surname>
<given-names>S. M.</given-names>
</name>
<name>
<surname>Armand</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Taylor</surname>
<given-names>R. H.</given-names>
</name>
<name>
<surname>Unberath</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>
<italic>In silico</italic> simulation: a key enabling technology for next-generation intelligent surgical systems</article-title>. <source>Prog. Biomed. Eng.</source> <volume>5</volume>, <fpage>032001</fpage>. <pub-id pub-id-type="doi">10.1088/2516-1091/acd28b</pub-id>
</mixed-citation>
</ref>
<ref id="B34">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Killeen</surname>
<given-names>B. D.</given-names>
</name>
<name>
<surname>Chaudhary</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Osgood</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Unberath</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Take a shot! Natural language control of intelligent robotic x-ray systems in surgery</article-title>. <source>Int. Journal Computer Assisted Radiology Surgery</source> <volume>19</volume>, <fpage>1165</fpage>&#x2013;<lpage>1173</lpage>. <pub-id pub-id-type="doi">10.1007/s11548-024-03120-3</pub-id>
<pub-id pub-id-type="pmid">38619790</pub-id>
</mixed-citation>
</ref>
<ref id="B35">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Killeen</surname>
<given-names>B. D.</given-names>
</name>
<name>
<surname>Suresh</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Gomez</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>&#xcd;&#xf1;igo</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Bailey</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Unberath</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>Intelligent control of robotic x-ray devices using a language-promptable digital twin</article-title>. <source>Int. J. Comput. Assisted Radiology Surg.</source> <volume>20</volume>, <fpage>1</fpage>&#x2013;<lpage>10</lpage>. <pub-id pub-id-type="doi">10.1007/s11548-025-03351-y</pub-id>
<pub-id pub-id-type="pmid">40205315</pub-id>
</mixed-citation>
</ref>
<ref id="B36">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kord</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Kluge</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Kufeld</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Kalinauskaite</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Loebel</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Stromberger</surname>
<given-names>C.</given-names>
</name>
<etal/>
</person-group> (<year>2021</year>). <article-title>Risks and benefits of fiducial marker placement in tumor lesions for robotic radiosurgery: technical outcomes of 357 implantations</article-title>. <source>Cancers</source> <volume>13</volume>, <fpage>4838</fpage>. <pub-id pub-id-type="doi">10.3390/cancers13194838</pub-id>
<pub-id pub-id-type="pmid">34638321</pub-id>
</mixed-citation>
</ref>
<ref id="B37">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kyeremeh</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Asciak</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Blackmur</surname>
<given-names>J. P.</given-names>
</name>
<name>
<surname>Luo</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Picard</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Shu</surname>
<given-names>W.</given-names>
</name>
<etal/>
</person-group> (<year>2025</year>). <article-title>Digital twins assisted surgery: a conceptual framework for transforming surgical training and navigation</article-title>. <source>Surg</source>. <pub-id pub-id-type="doi">10.1016/j.surge.2025.09.007</pub-id>
<pub-id pub-id-type="pmid">40992966</pub-id>
</mixed-citation>
</ref>
<ref id="B38">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Langlotz</surname>
<given-names>F.</given-names>
</name>
</person-group> (<year>2004</year>). <article-title>Potential pitfalls of computer aided orthopedic surgery</article-title>. <source>Injury</source> <volume>35</volume>, <fpage>A17</fpage>&#x2013;<lpage>A23</lpage>. <pub-id pub-id-type="doi">10.1016/j.injury.2004.05.006</pub-id>
<pub-id pub-id-type="pmid">15183699</pub-id>
</mixed-citation>
</ref>
<ref id="B39">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lasota</surname>
<given-names>P. A.</given-names>
</name>
<name>
<surname>Fong</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Shah</surname>
<given-names>J. A.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>A survey of methods for safe human-robot interaction</article-title>. <source>Found. Trends&#xae; Robotics</source> <volume>5</volume>, <fpage>261</fpage>&#x2013;<lpage>349</lpage>. <pub-id pub-id-type="doi">10.1561/2300000052</pub-id>
</mixed-citation>
</ref>
<ref id="B40">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lee</surname>
<given-names>J. D.</given-names>
</name>
<name>
<surname>See</surname>
<given-names>K. A.</given-names>
</name>
</person-group> (<year>2004</year>). <article-title>Trust in automation: designing for appropriate reliance</article-title>. <source>Hum. Factors</source> <volume>46</volume>, <fpage>50</fpage>&#x2013;<lpage>80</lpage>. <pub-id pub-id-type="doi">10.1518/hfes.46.1.50_30392</pub-id>
<pub-id pub-id-type="pmid">15151155</pub-id>
</mixed-citation>
</ref>
<ref id="B41">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lee</surname>
<given-names>Y.-S.</given-names>
</name>
<name>
<surname>Cho</surname>
<given-names>D.-C.</given-names>
</name>
<name>
<surname>Kim</surname>
<given-names>K.-T.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Navigation-guided/robot-assisted spinal surgery: a review article</article-title>. <source>Neurospine</source> <volume>21</volume>, <fpage>8</fpage>&#x2013;<lpage>17</lpage>. <pub-id pub-id-type="doi">10.14245/ns.2347184.592</pub-id>
<pub-id pub-id-type="pmid">38569627</pub-id>
</mixed-citation>
</ref>
<ref id="B42">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liebmann</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>von Atzigen</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>St&#xfc;tz</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Wolf</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Zingg</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Suter</surname>
<given-names>D.</given-names>
</name>
<etal/>
</person-group> (<year>2024</year>). <article-title>Automatic registration with continuous pose updates for marker-less surgical navigation in spine surgery</article-title>. <source>Med. Image Anal.</source> <volume>91</volume>, <fpage>103027</fpage>. <pub-id pub-id-type="doi">10.1016/j.media.2023.103027</pub-id>
<pub-id pub-id-type="pmid">37992494</pub-id>
</mixed-citation>
</ref>
<ref id="B43">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Wong</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Razjigaev</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Beier</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Peng</surname>
<given-names>S.</given-names>
</name>
<etal/>
</person-group> (<year>2024</year>). <article-title>A review on the form and complexity of human&#x2013;robot interaction in the evolution of autonomous surgery</article-title>. <source>Adv. Intell. Syst.</source> <volume>6</volume>, <fpage>2400197</fpage>. <pub-id pub-id-type="doi">10.1002/aisy.202400197</pub-id>
</mixed-citation>
</ref>
<ref id="B44">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Mainprice</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Berenson</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>2013</year>). &#x201c;<article-title>Human-robot collaborative manipulation planning using early prediction of human motion</article-title>,&#x201d; in <source>2013 IEEE/RSJ international conference on intelligent robots and systems</source> (<publisher-name>IEEE</publisher-name>), <fpage>299</fpage>&#x2013;<lpage>306</lpage>.</mixed-citation>
</ref>
<ref id="B45">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Misir</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Yuce</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>AI in orthopedic research: a comprehensive review</article-title>. <source>J. Orthop. Research&#xae;</source>. <pub-id pub-id-type="doi">10.1002/jor.26109</pub-id>
<pub-id pub-id-type="pmid">40415515</pub-id>
</mixed-citation>
</ref>
<ref id="B46">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Moccia</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Foti</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Routray</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Prudente</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Perin</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Sekula</surname>
<given-names>R. F.</given-names>
</name>
<etal/>
</person-group> (<year>2018</year>). <article-title>Toward improving safety in neurosurgery with an active handheld instrument</article-title>. <source>Ann. Biomedical Engineering</source> <volume>46</volume>, <fpage>1450</fpage>&#x2013;<lpage>1464</lpage>. <pub-id pub-id-type="doi">10.1007/s10439-018-2091-x</pub-id>
<pub-id pub-id-type="pmid">30014286</pub-id>
</mixed-citation>
</ref>
<ref id="B47">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mulyadi</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Hutami</surname>
<given-names>W. D.</given-names>
</name>
<name>
<surname>Suganda</surname>
<given-names>K. D.</given-names>
</name>
<name>
<surname>Khalisha</surname>
<given-names>D. F.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Risk of neurologic deficit in medially breached pedicle screws assessed by computed tomography: a systematic review</article-title>. <source>Asian Spine J.</source> <volume>18</volume>, <fpage>903</fpage>&#x2013;<lpage>912</lpage>. <pub-id pub-id-type="doi">10.31616/asj.2024.0325</pub-id>
<pub-id pub-id-type="pmid">39763360</pub-id>
</mixed-citation>
</ref>
<ref id="B48">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Osman</surname>
<given-names>E. I. A.</given-names>
</name>
<name>
<surname>Ismail</surname>
<given-names>M. M. E. M.</given-names>
</name>
<name>
<surname>Mukhtar</surname>
<given-names>M. A. H.</given-names>
</name>
<name>
<surname>Ahmed</surname>
<given-names>A. U. B.</given-names>
</name>
<name>
<surname>Mohamed</surname>
<given-names>N. A. A. E.</given-names>
</name>
<name>
<surname>Ibrahim</surname>
<given-names>A. A. A.</given-names>
</name>
<etal/>
</person-group> (<year>2025</year>). <article-title>Artificial intelligence and robotics in minimally invasive and complex surgical procedures: a systematic review</article-title>. <source>Cureus</source> <volume>17</volume>. <pub-id pub-id-type="doi">10.7759/cureus.81339</pub-id>
</mixed-citation>
</ref>
<ref id="B49">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>&#xd6;zsoy</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Pellegrini</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Czempiel</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Tristram</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Yuan</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Bani-Harouni</surname>
<given-names>D.</given-names>
</name>
<etal/>
</person-group> (<year>2025</year>). &#x201c;<article-title>MM-OR: a large multimodal operating room dataset for semantic understanding of high-intensity surgical environments</article-title>,&#x201d; in <source>Proceedings of the computer vision and pattern recognition conference</source>, <fpage>19378</fpage>&#x2013;<lpage>19389</lpage>.</mixed-citation>
</ref>
<ref id="B50">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>O&#x2019;Sullivan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Nevejans</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Allen</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Blyth</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Leonard</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Pagallo</surname>
<given-names>U.</given-names>
</name>
<etal/>
</person-group> (<year>2019</year>). <article-title>Legal, regulatory, and ethical frameworks for development of standards in artificial intelligence (AI) and autonomous robotic surgery</article-title>. <source>International Journal Medical Robotics Computer Assisted Surgery</source> <volume>15</volume>, <fpage>e1968</fpage>. <pub-id pub-id-type="doi">10.1002/rcs.1968</pub-id>
</mixed-citation>
</ref>
<ref id="B51">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pagani</surname>
<given-names>N. R.</given-names>
</name>
<name>
<surname>Menendez</surname>
<given-names>M. E.</given-names>
</name>
<name>
<surname>Moverman</surname>
<given-names>M. A.</given-names>
</name>
<name>
<surname>Puzzitiello</surname>
<given-names>R. N.</given-names>
</name>
<name>
<surname>Gordon</surname>
<given-names>M. R.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Adverse events associated with robotic-assisted joint arthroplasty: an analysis of the us food and drug administration maude database</article-title>. <source>J. Arthroplasty</source> <volume>37</volume>, <fpage>1526</fpage>&#x2013;<lpage>1533</lpage>. <pub-id pub-id-type="doi">10.1016/j.arth.2022.03.060</pub-id>
<pub-id pub-id-type="pmid">35314290</pub-id>
</mixed-citation>
</ref>
<ref id="B52">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Panesar</surname>
<given-names>S. S.</given-names>
</name>
<name>
<surname>Kliot</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Parrish</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Fernandez-Miranda</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Cagle</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Britz</surname>
<given-names>G. W.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Promises and perils of artificial intelligence in neurosurgery</article-title>. <source>Neurosurgery</source> <volume>87</volume>, <fpage>33</fpage>&#x2013;<lpage>44</lpage>. <pub-id pub-id-type="doi">10.1093/neuros/nyz471</pub-id>
<pub-id pub-id-type="pmid">31748800</pub-id>
</mixed-citation>
</ref>
<ref id="B53">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Parasuraman</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Manzey</surname>
<given-names>D. H.</given-names>
</name>
</person-group> (<year>2010</year>). <article-title>Complacency and bias in human use of automation: an attentional integration</article-title>. <source>Hum. Factors</source> <volume>52</volume>, <fpage>381</fpage>&#x2013;<lpage>410</lpage>. <pub-id pub-id-type="doi">10.1177/0018720810376055</pub-id>
<pub-id pub-id-type="pmid">21077562</pub-id>
</mixed-citation>
</ref>
<ref id="B54">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Payne</surname>
<given-names>C. J.</given-names>
</name>
<name>
<surname>Vyas</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Bautista-Salinas</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Marcus</surname>
<given-names>H. J.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>G.-Z.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Shared-control robots</article-title>. <source>Neurosurg. Robot.</source>, <fpage>63</fpage>&#x2013;<lpage>79</lpage>. <pub-id pub-id-type="doi">10.1007/978-1-0716-0993-4_4</pub-id>
</mixed-citation>
</ref>
<ref id="B55">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ram</surname>
<given-names>P. R.</given-names>
</name>
<name>
<surname>Jeyaraman</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Jeyaraman</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Yadav</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Venkatasalam</surname>
<given-names>R.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Revolutionizing orthopedic healthcare: the role of robotics</article-title>. <source>Cureus</source> <volume>15</volume>, <fpage>e44820</fpage>. <pub-id pub-id-type="doi">10.7759/cureus.44820</pub-id>
<pub-id pub-id-type="pmid">37809251</pub-id>
</mixed-citation>
</ref>
<ref id="B56">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rivero-Moreno</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Echevarria</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Vidal-Valderrama</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Pianetti</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Cordova-Guilarte</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Navarro-Gonzalez</surname>
<given-names>J.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>Robotic surgery: a comprehensive review of the literature and current trends</article-title>. <source>Cureus</source> <volume>15</volume>, <fpage>e42370</fpage>. <pub-id pub-id-type="doi">10.7759/cureus.42370</pub-id>
<pub-id pub-id-type="pmid">37621804</pub-id>
</mixed-citation>
</ref>
<ref id="B57">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Saeedi-Hosseiny</surname>
<given-names>M. S.</given-names>
</name>
<name>
<surname>Alruwaili</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Clancy</surname>
<given-names>M. P.</given-names>
</name>
<name>
<surname>Corson</surname>
<given-names>E. A.</given-names>
</name>
<name>
<surname>McMillan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Papachristou</surname>
<given-names>C.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>Automatic alignment of fractured femur: integration of robot and optical tracking system</article-title>. <source>IEEE Robotics Automation Lett.</source> <volume>8</volume>, <fpage>2438</fpage>&#x2013;<lpage>2445</lpage>. <pub-id pub-id-type="doi">10.1109/lra.2023.3251198</pub-id>
</mixed-citation>
</ref>
<ref id="B58">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Saeidi</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Opfermann</surname>
<given-names>J. D.</given-names>
</name>
<name>
<surname>Kam</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Raghunathan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>L&#xe9;onard</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Krieger</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2018</year>). &#x201c;<article-title>A confidence-based shared control strategy for the smart tissue autonomous robot (star)</article-title>,&#x201d; in <source>2018 IEEE/RSJ international conference on intelligent robots and systems (IROS)</source> (<publisher-name>IEEE</publisher-name>), <fpage>1268</fpage>&#x2013;<lpage>1275</lpage>.</mixed-citation>
</ref>
<ref id="B59">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Seenivasan</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Xu</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Mukul</surname>
<given-names>R. D. S.</given-names>
</name>
<name>
<surname>Ding</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Byrd</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Ku</surname>
<given-names>Y.-C.</given-names>
</name>
<etal/>
</person-group> (<year>2025</year>). <article-title>Beyond rigid ai: towards natural human-machine symbiosis for interoperative surgical assistance</article-title>. <comment>arXiv preprint arXiv:2507.2308</comment>.</mixed-citation>
</ref>
<ref id="B60">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shah</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Thakor</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Shah</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Singh</surname>
<given-names>O. V.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>Software as medical devices: requirements and regulatory landscape in the United States</article-title>. <source>Expert Rev. Med. Devices</source> <volume>22</volume>, <fpage>1201</fpage>&#x2013;<lpage>1214</lpage>. <pub-id pub-id-type="doi">10.1080/17434440.2025.2561918</pub-id>
<pub-id pub-id-type="pmid">40947420</pub-id>
</mixed-citation>
</ref>
<ref id="B61">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shahid</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Raza</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Qureshi</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Cheema</surname>
<given-names>U.</given-names>
</name>
<name>
<surname>Muneer</surname>
<given-names>S. U.</given-names>
</name>
<name>
<surname>Sehar</surname>
<given-names>A.</given-names>
</name>
<etal/>
</person-group> (<year>2025</year>). <article-title>Effectiveness and reliability of AI in diagnosis and robot-assisted spinal and cranial surgery: efficient outcomes and ethical worries</article-title>. <source>Ann. Med. Surg.</source> <volume>87</volume>, <fpage>7236</fpage>&#x2013;<lpage>7243</lpage>. <pub-id pub-id-type="doi">10.1097/MS9.0000000000003865</pub-id>
<pub-id pub-id-type="pmid">41180681</pub-id>
</mixed-citation>
</ref>
<ref id="B62">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>&#x160;uligoj</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>&#x160;vaco</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Jerbi&#x107;</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>&#x160;ekoranja</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Vidakovi&#x107;</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Automated marker localization in the planning phase of robotic neurosurgery</article-title>. <source>IEEE Access</source> <volume>5</volume>, <fpage>12265</fpage>&#x2013;<lpage>12274</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2017.2718621</pub-id>
</mixed-citation>
</ref>
<ref id="B63">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tafti</surname>
<given-names>A. P.</given-names>
</name>
<name>
<surname>Gu</surname>
<given-names>Q.</given-names>
</name>
<name>
<surname>Plate</surname>
<given-names>J. F.</given-names>
</name>
</person-group> (<year>2025</year>). <article-title>Uncertainty quantification and explainable ai in orthopaedic imaging: a timely call to action</article-title>. <source>J. Clin. Orthop. Trauma</source> <volume>70</volume>, <fpage>103208</fpage>. <pub-id pub-id-type="doi">10.1016/j.jcot.2025.103208</pub-id>
<pub-id pub-id-type="pmid">41089298</pub-id>
</mixed-citation>
</ref>
<ref id="B64">
<mixed-citation publication-type="book">
<person-group person-group-type="author">
<name>
<surname>Tomasz</surname>
<given-names>O.</given-names>
</name>
<name>
<surname>Waldemar</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Kajetan</surname>
<given-names>&#x141;.</given-names>
</name>
<name>
<surname>Jacek</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Tomasz</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Dariusz</surname>
<given-names>&#x141;.</given-names>
</name>
</person-group> (<year>2021</year>). &#x201c;<article-title>Robotics in neurosurgery&#x2013;past, presence and future</article-title>,&#x201d; in <source>International scientific conference on brain-computer interfaces BCI opole</source> (<publisher-name>Springer</publisher-name>), <fpage>1</fpage>&#x2013;<lpage>8</lpage>.</mixed-citation>
</ref>
<ref id="B65">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Unberath</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Gao</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Hu</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Judish</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Taylor</surname>
<given-names>R. H.</given-names>
</name>
<name>
<surname>Armand</surname>
<given-names>M.</given-names>
</name>
<etal/>
</person-group> (<year>2021</year>). <article-title>The impact of machine learning on 2d/3d registration for image-guided interventions: a systematic review and perspective</article-title>. <source>Front. Robotics AI</source> <volume>8</volume>, <fpage>716007</fpage>. <pub-id pub-id-type="doi">10.3389/frobt.2021.716007</pub-id>
<pub-id pub-id-type="pmid">34527706</pub-id>
</mixed-citation>
</ref>
<ref id="B66">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Varnavas</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Carrell</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Penney</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Fully automated 2d&#x2013;3d registration and verification</article-title>. <source>Med. Image Analysis</source> <volume>26</volume>, <fpage>108</fpage>&#x2013;<lpage>119</lpage>. <pub-id pub-id-type="doi">10.1016/j.media.2015.08.005</pub-id>
<pub-id pub-id-type="pmid">26387052</pub-id>
</mixed-citation>
</ref>
<ref id="B67">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>V&#xf6;r&#xf6;s</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Davoodi</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Wybaillie</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Vander Poorten</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Niu</surname>
<given-names>K.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>An augmented reality-based interaction scheme for robotic pedicle screw placement</article-title>. <source>J. Imaging</source> <volume>8</volume>, <fpage>273</fpage>. <pub-id pub-id-type="doi">10.3390/jimaging8100273</pub-id>
<pub-id pub-id-type="pmid">36286367</pub-id>
</mixed-citation>
</ref>
<ref id="B68">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xian</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Sun</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Luo</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Hu</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Zou</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Chan</surname>
<given-names>D. T. M.</given-names>
</name>
<etal/>
</person-group> (<year>2025</year>). <article-title>Task automated stereotactic brain biopsy robotic system with clf-cbf-based safety-critical neuronavigation</article-title>. <source>IEEE/ASME Trans. Mechatronics</source>. <pub-id pub-id-type="doi">10.1109/TMECH.2025.3540056</pub-id>
</mixed-citation>
</ref>
<ref id="B69">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yang</surname>
<given-names>G.-Z.</given-names>
</name>
<name>
<surname>Cambias</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Cleary</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Daimler</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Drake</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Dupont</surname>
<given-names>P. E.</given-names>
</name>
<etal/>
</person-group> (<year>2017</year>). <article-title>Medical robotics&#x2013;regulatory, ethical, and legal considerations for increasing levels of autonomy</article-title>. <source>Sci. Robot.</source> <volume>2</volume>, <fpage>eaam8638</fpage>. <pub-id pub-id-type="doi">10.1126/scirobotics.aam8638</pub-id>
<pub-id pub-id-type="pmid">33157870</pub-id>
</mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn fn-type="custom" custom-type="edited-by">
<p>
<bold>Edited by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/639928/overview">Tom Vercauteren</ext-link>, King&#x2019;s College London, United Kingdom</p>
</fn>
<fn fn-type="custom" custom-type="reviewed-by">
<p>
<bold>Reviewed by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2391016/overview">Lennart Karstensen</ext-link>, Friedrich-Alexander-Universit&#xe4;t Erlangen-N&#xfc;rnberg, Germany</p>
<p>
<ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3308188/overview">Martin Huber</ext-link>, King&#x2019;s College London, United Kingdom</p>
</fn>
</fn-group>
</back>
</article>