<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="review-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Oncol.</journal-id>
<journal-title>Frontiers in Oncology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Oncol.</abbrev-journal-title>
<issn pub-type="epub">2234-943X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fonc.2023.1116761</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Oncology</subject>
<subj-group>
<subject>Mini Review</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Artificial intelligence in improving the outcome of surgical treatment in colorectal cancer</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Avram</surname>
<given-names>Mihaela Flavia</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1928768"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Laz&#x103;r</surname>
<given-names>Daniela Cornelia</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Mari&#x15f;</surname>
<given-names>Mihaela Ioana</given-names>
</name>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
<xref ref-type="aff" rid="aff5">
<sup>5</sup>
</xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Olariu</surname>
<given-names>Sorin</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>Department of Surgery X, 1st Surgery Discipline, &#x201c;Victor Babe&#x15f;&#x201d; University of Medicine and Pharmacy Timi&#x15f;oara</institution>, <addr-line>Timi&#x15f;oara</addr-line>, <country>Romania</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Department of Mathematics, Politehnica University Timisoara</institution>, <addr-line>Timi&#x15f;oara</addr-line>, <country>Romania</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>Department V of Internal Medicine I, Discipline of Internal Medicine IV, &#x201c;Victor Babe&#x15f;&#x201d; University of Medicine and Pharmacy Timi&#x15f;oara</institution>, <addr-line>Timi&#x15f;oara</addr-line>, <country>Romania</country>
</aff>
<aff id="aff4">
<sup>4</sup>
<institution>Department of Functional Sciences, Division of Physiopathology, &#x201c;Victor Babes&#x201d; University of Medicine and Pharmacy Timisoara</institution>, <addr-line>Timisoara</addr-line>, <country>Romania</country>
</aff>
<aff id="aff5">
<sup>5</sup>
<institution>Center for Translational Research and Systems Medicine, &#x201c;Victor Babes&#x201d; University of Medicine and Pharmacy Timisoara</institution>, <addr-line>Timisoara</addr-line>, <country>Romania</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited by: Pasquale Cianci, Azienda Sanitaria Localedella Provincia di Barletta Andri Trani (ASL BT), Italy</p>
</fn>
<fn fn-type="edited-by">
<p>Reviewed by: Altamura Amedeo, Pia Fondazione di Culto e Religione Card. G. Panico, Italy; Vincenzo Lizzi, Azienda Ospedaliero-Universitaria Ospedali Riuniti di Foggia, Italy</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Mihaela Flavia Avram, <email xlink:href="mailto:avram.mihaela@umft.ro">avram.mihaela@umft.ro</email>
</p>
</fn>
<fn fn-type="other" id="fn002">
<p>This article was submitted to Surgical Oncology, a section of the journal Frontiers in Oncology</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>17</day>
<month>01</month>
<year>2023</year>
</pub-date>
<pub-date pub-type="collection">
<year>2023</year>
</pub-date>
<volume>13</volume>
<elocation-id>1116761</elocation-id>
<history>
<date date-type="received">
<day>05</day>
<month>12</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>03</day>
<month>01</month>
<year>2023</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2023 Avram, Laz&#x103;r, Mari&#x15f; and Olariu</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Avram, Laz&#x103;r, Mari&#x15f; and Olariu</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<sec>
<title>Background</title>
<p>A considerable number of recent studies have used artificial intelligence (AI) in the area of colorectal cancer (CRC). Surgical treatment of CRC still remains the most important curative component. Artificial intelligence in CRC surgery is not nearly as advanced as it is in screening (colonoscopy), diagnosis and prognosis, especially due to the increased complexity and variability of structures and elements in all fields of view, as well as a general shortage of annotated video banks for utilization.</p>
</sec>
<sec>
<title>Methods</title>
<p>A literature search was performed and relevant studies were included in the minireview.</p>
</sec>
<sec>
<title>Results</title>
<p>The intraoperative steps which, at this moment, can benefit from AI in CRC are: phase and action recognition, excision plane navigation, endoscopy control, real-time circulation analysis, knot tying, automatic optical biopsy and hyperspectral imaging. This minireview also analyses the current advances in robotic treatment of CRC as well as the present possibility of automated CRC robotic surgery.</p>
</sec>
<sec>
<title>Conclusions</title>
<p>The use of AI in CRC surgery is still at its beginnings. The development of AI models capable of reproducing a colorectal expert surgeon&#x2019;s skill, the creation of large and complex datasets and the standardization of surgical colorectal procedures will contribute to the widespread use of AI in CRC surgical treatment.</p>
</sec>
</abstract>
<kwd-group>
<kwd>artificial intelligence</kwd>
<kwd>colorectal cancer</kwd>
<kwd>automated robotic surgery</kwd>
<kwd>phase recognition</kwd>
<kwd>excision plane navigation</kwd>
<kwd>endoscopy control</kwd>
<kwd>annotated video banks</kwd>
</kwd-group>
<counts>
<fig-count count="1"/>
<table-count count="1"/>
<equation-count count="0"/>
<ref-count count="47"/>
<page-count count="7"/>
<word-count count="3159"/>
</counts>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Colorectal cancer (CRC) is the second most prevalent cause of cancer-related deaths worldwide and the third most common malignancy in both men and women (<xref ref-type="bibr" rid="B1">1</xref>, <xref ref-type="bibr" rid="B2">2</xref>). With liver metastases present in nearly 20% of cases, 60&#x2013;70% of individuals with clinical symptoms of CRC are detected at advanced stages. Additionally, individuals with metastatic dissemination at the time of diagnosis have a 5-year overall survival rate of only 10&#x2013;15%, compared to patients with local malignancy, in whom it ranges from 80&#x2013;90% (<xref ref-type="bibr" rid="B3">3</xref>).</p>
<p>Artificial intelligence (AI) is a branch of computer science that focuses on creating intelligent computers capable of performing activities that normally necessitate human intelligence. Several AI technologies exist all around us, but understanding and evaluating their impact on today&#x2019;s society might be difficult. Deep learning algorithms and support vector machines (SVMs) have made important contributions to this advanced technology during the last decade, playing a key role in medical and healthcare systems (<xref ref-type="bibr" rid="B4">4</xref>).</p>
<p>There are two types of AI applications in the medical field: virtual and physical. The virtual component of AI is made up of machine learning (ML) and deep learning (DL, a subset of ML) (<xref ref-type="bibr" rid="B5">5</xref>). There are three types of machine learning algorithms: supervised, unsupervised, and reinforcement learning. Meanwhile, the most well-known deep learning scheme, a convolutional neural network (CNN), is a sort of multilayer artificial neural network that is extremely efficient for image categorization (<xref ref-type="bibr" rid="B6">6</xref>).</p>
<p>Artificial neural networks (ANNs) are ML tools. In function, they mimic the human brain by connecting and discovering complicated relationships and patterns in data. ANNs are made up of numerous computational units (neurons) that accept inputs, execute calculations, and send output to the next computational unit. The input is processed as signals by layers of algorithms, which produce specific patterns as final output, which are interpreted and employed in decision-making. Simple 1- or 2-layered neural networks are typically used in ANNs (<xref ref-type="bibr" rid="B7">7</xref>, <xref ref-type="bibr" rid="B8">8</xref>).</p>
<p>Computer vision (CV) is focused on how computers may learn to understand digital images and videos (such as object and scene recognition) at a high level, in a manner similar to the human eye. The processed data can include video sequences, several camera perspectives, or multidimensional data from a medical scanning instrument (<xref ref-type="bibr" rid="B7">7</xref>, <xref ref-type="bibr" rid="B9">9</xref>&#x2013;<xref ref-type="bibr" rid="B11">11</xref>).</p>
<p>The physical branch of AI includes medical devices and robots, such as the Da Vinci Surgical System (Intuitive Surgical Inc., Sunnyvale, CA, USA), as well as nanorobots.</p>
<p>A considerable number of recent studies have used AI in the area of CRC (<xref ref-type="bibr" rid="B7">7</xref>&#x2013;<xref ref-type="bibr" rid="B9">9</xref>). From the standpoint of clinical practice, the available AI applications in CRC primarily contain four clinical aspects (<xref ref-type="bibr" rid="B10">10</xref>):</p>
<list list-type="bullet">
<list-item>
<p>Screening: Endoscopy is the gold standard for CRC screening. AI-assisted colonoscopy for polyp detection and characterization, risk prediction models using clinical and omics data, are expected to improve CRC screening.</p>
</list-item>
<list-item>
<p>Diagnosis: The qualitative diagnosis and staging of CRC are mostly based on imaging and pathological examination. DL can greatly increase medical image interpretation, minimize disparities in experience, and reduce misinterpretation rates thanks to powerful image recognition processing technology (<xref ref-type="bibr" rid="B9">9</xref>).</p>
</list-item>
<list-item>
<p>Treatment: The treatment of CRC mainly consists of surgery, chemotherapy and radiotherapy. Novel therapies can be evaluated with the help of AI, while AI can provide a more precise treatment choice, individually tailored on each patient (<xref ref-type="bibr" rid="B11">11</xref>).</p>
</list-item>
<list-item>
<p>Prognosis: Predicting the recurrence and estimating survival is more accurate using an ML approach, as it uses various multidimensional information. Deep learning has been demonstrated to be as good as or better than statistical methods (e.g., the Cox regression model) in cancer prognosis (<xref ref-type="bibr" rid="B12">12</xref>).</p>
</list-item>
</list>
<p>Surgical treatment of CRC still remains the most important curative component. Artificial intelligence in CRC surgery is not nearly as advanced as it is in screening (colonoscopy), diagnosis and prognosis. This is most likely due to the increased complexity and variability of structures and elements in all fields of view, as well as a general shortage of equivalent annotated video banks for utilization (<xref ref-type="bibr" rid="B13">13</xref>, <xref ref-type="bibr" rid="B14">14</xref>).</p>
<p>The aim of this minireview is to summarize up-to-date information on the possibility of using AI in improving the outcome of surgical treatment of CRC. It concentrates on the intraoperative steps which can benefit from AI, summarizes the published studies, and gives a brief outline of current AI applications in colorectal surgery. It also analyzes the current advances in CRC robotic treatment, especially automated surgeries. In order to make appropriate decisions on topics deserving of further investigation, it is necessary to understand the existing situation of AI in the surgical treatment of CRC.</p>
</sec>
<sec id="s2">
<label>2</label>
<title>Methods</title>
<p>A literature search was performed up to September 5th, 2022 using the following online databases: PubMed, Embase, Cochrane Library. The terms AI, OR, and surgery, including synonyms or equivalent terms, were used to obtain the literature. We have read the abstracts and selected the articles presenting data which can be used during CRC surgical treatment. The literature search retrieved 1484 articles from 3 databases. Finally, 10 studies were included. The flow diagram can be viewed in <xref ref-type="fig" rid="f1">
<bold>Figure&#xa0;1</bold>
</xref>.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>PRISMA 2020 flow diagram search (<xref ref-type="bibr" rid="B15">15</xref>).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fonc-13-1116761-g001.tif"/>
</fig>
<p>
<xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref> shows an overview of the included studies, their application, and the specific AI subfield the application is based on.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Overview of included studies with specific applications.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Application</th>
<th valign="top" align="center">Study</th>
<th valign="top" align="center">Year</th>
<th valign="top" align="center">AI subfield</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Autonomous robotic intestinal anastomosis</td>
<td valign="top" align="left">Shademan et&#xa0;al. (<xref ref-type="bibr" rid="B16">16</xref>)<break/>Saeidi et&#xa0;al. (<xref ref-type="bibr" rid="B17">17</xref>)</td>
<td valign="top" align="center">2016<break/>2022</td>
<td valign="top" align="left">CNN</td>
</tr>
<tr>
<td valign="top" align="left">Phase recognition</td>
<td valign="top" align="left">Kitaguchi et&#xa0;al. (<xref ref-type="bibr" rid="B18">18</xref>)</td>
<td valign="top" align="center">2020</td>
<td valign="top" align="left">CNN</td>
</tr>
<tr>
<td valign="top" align="left">Excision plane navigation</td>
<td valign="top" align="left">Igaki et&#xa0;al. (<xref ref-type="bibr" rid="B19">19</xref>)</td>
<td valign="top" align="center">2022</td>
<td valign="top" align="left">DL</td>
</tr>
<tr>
<td valign="top" align="left">Camera guidance</td>
<td valign="top" align="left">Wagner et&#xa0;al. (<xref ref-type="bibr" rid="B20">20</xref>)</td>
<td valign="top" align="center">2021</td>
<td valign="top" align="left">ML</td>
</tr>
<tr>
<td valign="top" align="left">AI real-time microcirculation analysis using ICG</td>
<td valign="top" align="left">Park et&#xa0;al. (<xref ref-type="bibr" rid="B21">21</xref>)</td>
<td valign="top" align="center">2020</td>
<td valign="top" align="left">ML</td>
</tr>
<tr>
<td valign="top" align="left">Knot-tying</td>
<td valign="top" align="left">Weede et&#xa0;al. (<xref ref-type="bibr" rid="B22">22</xref>)</td>
<td valign="top" align="center">2011</td>
<td valign="top" align="left">RNN</td>
</tr>
<tr>
<td valign="top" align="left">Optical biopsy</td>
<td valign="top" align="left">Jansen-Winkeln et&#xa0;al. (<xref ref-type="bibr" rid="B23">23</xref>)<break/>Collins et&#xa0;al. (<xref ref-type="bibr" rid="B24">24</xref>)<break/>Okamoto et&#xa0;al. (<xref ref-type="bibr" rid="B25">25</xref>)</td>
<td valign="top" align="center">2021<break/>2022<break/>2022</td>
<td valign="top" align="left">CNN<break/>CNN<break/>CNN &amp;DL</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>ICG, indocyanine green; HSI, hyperspectral imaging; CNN, convolutional neural networks; DL, deep learning; ML, machine learning; RNN, recurrent neural networks.</p>
</fn>
</table-wrap-foot>
</table-wrap>
</sec>
<sec id="s3" sec-type="results">
<label>3</label>
<title>Results</title>
<sec id="s3_1">
<label>3.1</label>
<title>Surgical robots</title>
<p>With the introduction of robotic colorectal surgery, colorectal cancer surgical treatment has entered a new era. The da Vinci System is now the most extensively utilized robotic surgical system. It allows surgeons to execute extremely delicate or highly complex procedures with wristed devices that have seven degrees of freedom. When compared to traditional open surgery, the advantages of surgery with these robots include a shorter period of recovery and hospital stay, minimum scarring, smaller incisions, and a significant reduction in the risk of surgical site infections, postoperative pain, and blood loss (<xref ref-type="bibr" rid="B26">26</xref>, <xref ref-type="bibr" rid="B27">27</xref>). Computer-controlled instruments enable surgeons to operate with a wider viewing field, greater flexibility, dexterity, precision, and less fatigue. The da Vinci dual-console provides integrated teaching and supervision for residents&#x2019; surgical training. The Senhance surgical robot (TransEnterix Surgical Inc., Morrisville, NC, USA) is a laparoscopic-based technology that allows skilled laparoscopic surgeons to perform more sophisticated surgeries (<xref ref-type="bibr" rid="B28">28</xref>).</p>
<p>The robotic platform has a distinct benefit in that it allows access to difficult-to-reach locations, such as a narrow pelvis, while also preserving postoperative urinary and sexual function (<xref ref-type="bibr" rid="B29">29</xref>). In the ROLARR randomized clinical trial, laparoscopic rectal surgery was associated with higher conversion rates than robotic surgery in men, obese patients, and patients undergoing a low anterior resection (<xref ref-type="bibr" rid="B30">30</xref>). Recent research has also shown that robotic-assisted surgery appears to be more suitable for protecting the pelvic autonomic nerve (<xref ref-type="bibr" rid="B29">29</xref>, <xref ref-type="bibr" rid="B31">31</xref>, <xref ref-type="bibr" rid="B32">32</xref>).</p>
<p>The Intuitive Surgical da Vinci system pioneered the notion of transparent teleoperation, in which motions done by the surgeon on the control interface are precisely copied by surgical tools on the patient side. The lack of a decision-making process by the machine in the transparent teleoperation paradigm gives the surgeon unlimited control. However, these devices have certain algorithmic autonomy, such as tremor suppression and redundancy resolution, which do not interfere with the surgeon&#x2019;s actions (<xref ref-type="bibr" rid="B33">33</xref>). They cannot be considered AI-driven devices, but they offer a starting point for the hardware for future autonomous operating robots.</p>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Autonomous surgical robots in CRC surgery</title>
<p>Shademan et&#xa0;al. described complete <italic>in vivo</italic> autonomous robotic anastomosis of porcine intestine utilizing the Smart Tissue Autonomous Robot (STAR) (<xref ref-type="bibr" rid="B16">16</xref>). STAR surpassed human surgeons in a range of <italic>ex vivo</italic> and <italic>in vivo</italic> surgical tasks, despite being conducted in a carefully controlled experimental context. In later <italic>in vivo</italic> tests, STAR obtained 66.28% correctly placed stitches in the first attempt, which corresponded to an average of 0.34 suture hesitancy per stitch (<xref ref-type="bibr" rid="B17">17</xref>).</p>
<p>For the first time, these experiments proved the fledgling clinical feasibility of an autonomous soft-tissue surgical robot. STAR was controlled by artificial intelligence (AI) algorithms and received input from an array of optical and tactile sensors, as opposed to traditional surgical robots, which are managed in real time by people and have become ubiquitous in specific subspecialties.</p>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Phase and action recognition</title>
<p>Phase recognition is the task of identifying surgical images according to preset surgical phases. Phases are parts of surgical operations that are required to finish procedures successfully. They are often determined by consensus and recorded on surgical videos (<xref ref-type="bibr" rid="B34">34</xref>).</p>
<p>There are several studies of phase and action recognition, on different types of surgery, including colorectal surgery. The study of Kitaguchi et&#xa0;al. aimed to create a large annotated dataset containing laparoscopic colorectal surgery videos and to evaluate the accuracy of automatic recognition for surgical phase, action, and tool by combining AI with the dataset. They used 300 intraoperative videos; 82 million frames were marked for a phase and action classification task, while 4000 frames were marked for a tool segmentation task. 80% of the frames were used for the training dataset and 20% for the test dataset. CNN was utilized to analyze the videos. The accuracy for the automatic surgical phase task was 81%, while the accuracy for the action classification task was 83.2% (<xref ref-type="bibr" rid="B18">18</xref>).</p>
</sec>
<sec id="s3_4">
<label>3.4</label>
<title>Excision plane navigation</title>
<p>The creation of an image-guided navigation system for areolar tissue in the complete mesorectal excision plane using deep learning has been reported by Igaki et&#xa0;al. This could be useful to surgeons since areolar tissue can be utilized as a landmark for the optimum dissection plane. Deep learning-based semantic segmentation of areolar tissue was conducted in the whole mesorectal excision plane. The deep learning model was trained using intraoperative images of the whole mesorectal excision scene taken from left laparoscopic resection movies. Six hundred annotation images were generated from 32 videos, with 528 photos used in training and 72 images used in testing. The established semantic segmentation model helps in locating and emphasizing the areolar tissue area in the whole mesorectal excision plane (<xref ref-type="bibr" rid="B19">19</xref>).</p>
</sec>
<sec id="s3_5">
<label>3.5</label>
<title>Endoscopy Control</title>
<p>There are commercial systems available that allow the endoscopic camera to move without human intervention, following particular features in the scene. Viki (<xref ref-type="bibr" rid="B35">35</xref>), FreeHand (<xref ref-type="bibr" rid="B36">36</xref>), SOLOASSIST (<xref ref-type="bibr" rid="B37">37</xref>) and AutoLap (<xref ref-type="bibr" rid="B20">20</xref>), for example, do camera stabilization and target tracking. These were the first autonomous systems used to assist with MIS intervention. The autonomy is implemented <italic>via</italic> feature tracking algorithms that maintain the surgical instrument in the endoscope&#x2019;s visual field (<xref ref-type="bibr" rid="B38">38</xref>).</p>
<p>For AutoLap, minimally invasive rectal resection with entire mesorectal excision was chosen to experimentally test cognitive camera guidance, as this surgical method places great demands on camera control. A single surgeon performed twenty surgeries with human camera guidance for learning purposes. After the completion of the surgeon&#x2019;s learning curve, two different robots were trained on data from the manual camera guiding, followed by using one robot to train the other. The performance of the cognitive camera robot improved with experience. The duration of each surgery improved as the robot became more experienced, and the quality of the camera guidance (evaluated by the surgeon as good/neutral/poor) also improved, becoming good in 56.2% of evaluations (<xref ref-type="bibr" rid="B20">20</xref>).</p>
</sec>
<sec id="s3_6">
<label>3.6</label>
<title>AI-based real-time microcirculation analysis</title>
<p>In order to predict anastomotic complications attributable to hypoperfusion after laparoscopic colonic surgery, a fluorescence laparoscopic system can be used during surgery for angiography using indocyanine green (ICG). Each patient has a different perfusion status, due to individual variations in collateral circulation blood flow pathways, which provides a different ICG curve. A well-trained AI can forecast the probability of hypoperfusion-related anastomotic problems by analyzing the microcirculation state, by using numerous metrics and ICG curve patterns. The AI-based micro perfusion analysis system can help surgeons by quickly performing real-time analysis and giving information in a color map to surgeons. Using a neural network that imitates the visual cortex, Park et&#xa0;al. clustered 10,000 ICG curves into 25 patterns using unsupervised learning, an AI training approach that does not require annotations during training. ICG curves were derived from 65 procedures. Curves were preprocessed to minimize the degradation of the AI model caused by external factors such as light source reflection, background, and camera movement. The AI model revealed more accuracy in the microcirculation evaluation when the AUC of the AI-based technique was compared to T1/2max (time from first fluorescence increase to half of maximum), TR (time ratio: T1/2max/Tmax, where Tmax is the time from first fluorescence increase to maximum), and RS (rise slope), with values of 0.842, 0.750, 0.734, and 0.677, respectively. This makes it easier to create a color mapping scheme of red-green-blue areas that classifies the degree of vascularization. In comparison to a surgeon&#x2019;s solely visual inspection, this AI model delivers a more objective and accurate approach of fluorescence signal evaluation. It can provide an immediate evaluation of the grade of perfusion during minimally invasive colorectal procedures, allowing for early detection of insufficient vascularization (<xref ref-type="bibr" rid="B21">21</xref>, <xref ref-type="bibr" rid="B39">39</xref>).</p>
</sec>
<sec id="s3_7">
<label>3.7</label>
<title>Knot-tying</title>
<p>Knot-tying is part of basic surgical skills and a quick technique in open surgery, while laparoscopic knot-tying can take up to three minutes for a single knot. Mayer et&#xa0;al. described a solution based on RNNs to speed up knot-tying in robotic cardiac surgery. The surgeon inputs a sequence to the network (for example, instances of human-performed knot-tying), and an RNN with long-term storage learns the task. The preprogrammed controller was able to construct a knot in 33.7 seconds; however, the introduction of an RNN offered a speed improvement of about 25% after learning from 50 prior runs, generating a knot in 25.8 seconds (<xref ref-type="bibr" rid="B22">22</xref>, <xref ref-type="bibr" rid="B40">40</xref>, <xref ref-type="bibr" rid="B41">41</xref>).</p>
</sec>
<sec id="s3_8">
<label>3.8</label>
<title>AI in automatic optical biopsy and hyperspectral imaging for CRC</title>
<p>Optical biopsy is a light-based nondestructive <italic>in situ</italic> assessment of tissue pathologic features. Hyperspectral imaging (HSI) is a non-invasive optical imaging tool that provides pixel-by-pixel spectroscopic and spatial information about the investigated area. Tissue-light interaction produces distinct spectral signatures, allowing the visualization of tissular perfusion and differentiation of tissue types. HSI cameras are commercially available and are easily compatible with laparoscopes (<xref ref-type="bibr" rid="B42">42</xref>, <xref ref-type="bibr" rid="B43">43</xref>). In the past years, several very promising studies, which used different AI methods in detecting CRC during surgery using HSI, were published.</p>
<p>Jansen-Winkeln et&#xa0;al. used HSI records from 54 patients who underwent colorectal resections, creating a realistic intraoperative setting for their study. By using a CNN method, they obtained a sensitivity of 86% and a specificity of 95% for the distinction between cancer and healthy mucosa, while differentiating cancer against adenoma had a sensitivity of 68% and a specificity of 59% for CRC (<xref ref-type="bibr" rid="B23">23</xref>).</p>
<p>Collins et&#xa0;al. used HSI imaging on specimens obtained immediately after extraction from 34 patients undergoing surgical resection for CRC. Using a CNN to automatically detect CRC in the HSI images, they obtained a sensitivity of 87% and a specificity of 90% for cancer detection. Their approach could be used for objectively assessing tumor margins during surgery (<xref ref-type="bibr" rid="B24">24</xref>).</p>
<p>By combining HSI and a CNN trained with deep learning on porcine models, Okamoto et&#xa0;al. obtained an automatic distinction of different anatomical layers in CRC surgery, achieving a recognition sensitivity of 79.0 &#xb1; 21.0% for the retroperitoneum and 86.0 &#xb1; 16.0% for the colon and mesentery (<xref ref-type="bibr" rid="B25">25</xref>). These results are promising in improving the results of complete mesocolic excision, by lowering the complications associated with it (like lesions of the ureter or gonadal vessels) and offering a better oncologic result.</p>
</sec>
</sec>
<sec id="s4" sec-type="discussion">
<label>4</label>
<title>Discussions</title>
<p>This minireview offers an overview of various AI applications currently available for the surgical treatment of CRC, which will show their utility in improving treatment outcome in the future. Although promising in their pilot effort, the AI applications mentioned in this article are not ready yet for large-scale clinical usage.</p>
<p>Autonomous robots are still part of the future, but the moment they will become part of the surgical treatment is getting nearer. The hardware part is available (commercially available surgical robots), while several intraoperative aspects of CRC surgeries have been captured, analyzed and successfully reproduced using AI.</p>
<p>The current use of AI in the medical area is steadily changing the diagnostic and treatment approach to a wide range of diseases. While many AI applications have been used and investigated in several cancer entities, such as lung and breast cancer, the use of AI in CRC is still in its early stages (<xref ref-type="bibr" rid="B44">44</xref>). AI&#x2019;s utility in CRC has been established mostly for aiding in screening and staging. Meanwhile, evidence on the use of AI in colorectal surgery is limited.</p>
<p>Surgical data and applications are more difficult to analyze and use than data for AI in screening endoscopy, radiology, and pathology. Surgical movies are dynamic, displaying difficult-to-model tool-tissue interactions that modify and even entirely reshape anatomical situations. Surgical workflows and techniques are difficult to standardize, particularly in long and unpredictable operations like colorectal surgery for CRC. During surgical interventions, surgeons use prior knowledge, such as preoperative imaging, as well as their personal experience and intuition to make decisions. More and better data are required to address these challenges. This includes reaching an agreement on annotation techniques (<xref ref-type="bibr" rid="B45">45</xref>) and publicly publishing vast, high-quality annotated datasets. Multiple institutions must collaborate in this context to ensure that data are diverse and representative (<xref ref-type="bibr" rid="B46">46</xref>). Such datasets will be critical for training stronger AI models, and also for demonstrating generalizability through external validation studies (<xref ref-type="bibr" rid="B47">47</xref>).</p>
</sec>
<sec id="s5" sec-type="conclusions">
<label>5</label>
<title>Conclusions</title>
<p>The use of AI in CRC surgery is still at its beginnings, despite the fact that AI has already demonstrated its clear clinical benefits in the screening and diagnosis of CRC. Many studies are still in the preclinical phase. The development of AI models capable of reproducing a colorectal expert surgeon&#x2019;s skill, the creation of large and complex datasets and the standardization of surgical colorectal procedures will contribute to the widespread use of AI in CRC surgical treatment.</p>
</sec>
<sec id="s6" sec-type="author-contributions">
<title>Author contributions</title>
<p>MA contributed to the conception, research of the primary literature, and writing of the article. DL, MM contributed to the conception and research of the primary literature for the article. SO contributed to the conception, research of the primary literature, and writing of the article. All authors contributed to the article and approved the submitted version.</p>
</sec>
</body>
<back>
<sec id="s7" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s8" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors&#xa0;and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<label>1</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sung</surname> <given-names>H</given-names>
</name>
<name>
<surname>Ferlay</surname> <given-names>J</given-names>
</name>
<name>
<surname>Siegel</surname> <given-names>RL</given-names>
</name>
<name>
<surname>Laversanne</surname> <given-names>M</given-names>
</name>
<name>
<surname>Soerjomataram</surname> <given-names>I</given-names>
</name>
<name>
<surname>Jemal</surname> <given-names>A</given-names>
</name>
<etal/>
</person-group>. <article-title>Global cancer statistics 2020: GLOBOCAN estimates of incidence and mortality worldwide for 36 cancers in 185 countries</article-title>. <source>CA Cancer J Clin</source> (<year>2021</year>) <volume>71</volume>:<page-range>209&#x2013;49</page-range>. doi: <pub-id pub-id-type="doi">10.3322/caac.21660</pub-id>
</citation>
</ref>
<ref id="B2">
<label>2</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fitzmaurice</surname> <given-names>C</given-names>
</name>
<name>
<surname>Abate</surname> <given-names>D</given-names>
</name>
<name>
<surname>Abbasi</surname> <given-names>N</given-names>
</name>
<name>
<surname>Abbastabar</surname> <given-names>H</given-names>
</name>
<name>
<surname>Abd-Allah</surname> <given-names>F</given-names>
</name>
<name>
<surname>Abdel-Rahman</surname> <given-names>O</given-names>
</name>
<etal/>
</person-group>. <article-title>Global, regional, and national cancer incidence, mortality, years of life lost, years lived with disability, and disability-adjusted life-years for 29 cancer groups, 1990 to 2017: A systematic analysis for the global burden of disease study</article-title>. <source>JAMA Oncol</source> (<year>2019</year>) <volume>5</volume>:<page-range>1749&#x2013;68</page-range>. doi: <pub-id pub-id-type="doi">10.1001/jamaoncol.2019.2996</pub-id>
</citation>
</ref>
<ref id="B3">
<label>3</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Miller</surname> <given-names>KD</given-names>
</name>
<name>
<surname>Nogueira</surname> <given-names>L</given-names>
</name>
<name>
<surname>Mariotto</surname> <given-names>AB</given-names>
</name>
<name>
<surname>Rowland</surname> <given-names>JH</given-names>
</name>
<name>
<surname>Yabroff</surname> <given-names>KR</given-names>
</name>
<name>
<surname>Alfano</surname> <given-names>CM</given-names>
</name>
<etal/>
</person-group>. <article-title>Cancer treatment and survivorship statistics, 2019</article-title>. <source>CA Cancer J Clin</source> (<year>2019</year>) <volume>69</volume>:<page-range>363&#x2013;85</page-range>. doi: <pub-id pub-id-type="doi">10.3322/caac.21565</pub-id>
</citation>
</ref>
<ref id="B4">
<label>4</label>
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Shalev-Shwartz</surname> <given-names>S</given-names>
</name>
<name>
<surname>Ben-David</surname> <given-names>S</given-names>
</name>
</person-group>. <source>Understanding machine learning: From theory to algorithms</source>. <publisher-loc>Cambridge, UK</publisher-loc>: <publisher-name>Cambridge University Press</publisher-name> (<year>2014</year>).</citation>
</ref>
<ref id="B5">
<label>5</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hamet</surname> <given-names>P</given-names>
</name>
<name>
<surname>Tremblay</surname> <given-names>J</given-names>
</name>
</person-group>. <article-title>Artificial intelligence in medicine</article-title>. <source>Metabolism</source> (<year>2017</year>) <volume>69</volume>:<page-range>S36&#x2013;40</page-range>. doi: <pub-id pub-id-type="doi">10.1016/j.metabol.2017.01.011</pub-id>
</citation>
</ref>
<ref id="B6">
<label>6</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ruffle</surname> <given-names>JK</given-names>
</name>
<name>
<surname>Farmer</surname> <given-names>AD</given-names>
</name>
<name>
<surname>Aziz</surname> <given-names>Q</given-names>
</name>
</person-group>. <article-title>Artificial intelligence-assisted gastroenterology&#x2013;promises and pitfalls</article-title>. <source>Am J Gastroenterol</source> (<year>2019</year>) <volume>114</volume>:<page-range>422&#x2013;8</page-range>. doi: <pub-id pub-id-type="doi">10.1038/s41395-018-0268-4</pub-id>
</citation>
</ref>
<ref id="B7">
<label>7</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hashimoto</surname> <given-names>DA</given-names>
</name>
<name>
<surname>Rosman</surname> <given-names>G</given-names>
</name>
<name>
<surname>Rus</surname> <given-names>D</given-names>
</name>
<name>
<surname>Meireles</surname> <given-names>OR</given-names>
</name>
</person-group>. <article-title>Artificial intelligence in surgery: Promises and perils</article-title>. <source>Ann Surg</source> (<year>2018</year>) <volume>268</volume>(<issue>1</issue>):<page-range>70&#x2013;6</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1097/sla.0000000000002693</pub-id>
</citation>
</ref>
<ref id="B8">
<label>8</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Deo</surname> <given-names>RC</given-names>
</name>
</person-group>. <article-title>Machine learning in medicine</article-title>. <source>Circulation</source> (<year>2015</year>) <volume>132</volume>(<issue>20</issue>):<page-range>1920&#x2013;30</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1161/circulationaha.115.001593</pub-id>
</citation>
</ref>
<ref id="B9">
<label>9</label>
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Szeliski</surname> <given-names>R</given-names>
</name>
</person-group>. <source>Computer vision: Algorithms and applications</source>. <publisher-loc>New York City, NY</publisher-loc>: <publisher-name>Springer Science &amp; Business Media</publisher-name> (<year>2010</year>).</citation>
</ref>
<ref id="B10">
<label>10</label>
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Huang</surname> <given-names>T</given-names>
</name>
</person-group>. <source>Computer vision: Evolution and promise</source>. <publisher-loc>Champaign, IL</publisher-loc>: <publisher-name>University of Illinois Press</publisher-name> (<year>1996</year>).</citation>
</ref>
<ref id="B11">
<label>11</label>
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Sonka</surname> <given-names>M</given-names>
</name>
<name>
<surname>Hlavac</surname> <given-names>V</given-names>
</name>
<name>
<surname>Boyle</surname> <given-names>R</given-names>
</name>
</person-group>. <source>Image processing, analysis, and machine vision</source>. <publisher-loc>Boston, MA</publisher-loc>: <publisher-name>Cengage Learning</publisher-name> (<year>2014</year>).</citation>
</ref>
<ref id="B12">
<label>12</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chahal</surname> <given-names>D</given-names>
</name>
<name>
<surname>Byrne</surname> <given-names>MFA</given-names>
</name>
</person-group>. <article-title>Primer on artificial intelligence and its application to endoscopy</article-title>. <source>Gastrointest Endosc</source> (<year>2020</year>) <volume>92</volume>:<page-range>813&#x2013;20</page-range>. doi: <pub-id pub-id-type="doi">10.1016/j.gie.2020.04.074</pub-id>
</citation>
</ref>
<ref id="B13">
<label>13</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pacal</surname> <given-names>I</given-names>
</name>
<name>
<surname>Karaboga</surname> <given-names>D</given-names>
</name>
<name>
<surname>Basturk</surname> <given-names>A</given-names>
</name>
<name>
<surname>Akay</surname> <given-names>B</given-names>
</name>
<name>
<surname>Nalbantoglu</surname> <given-names>UA</given-names>
</name>
</person-group>. <article-title>Comprehensive review of deep learning in colon cancer</article-title>. <source>Comput Biol Med</source> (<year>2020</year>) <volume>126</volume>:<fpage>104003</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.compbiomed.2020.104003</pub-id>
</citation>
</ref>
<ref id="B14">
<label>14</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Goyal</surname> <given-names>H</given-names>
</name>
<name>
<surname>Mann</surname> <given-names>R</given-names>
</name>
<name>
<surname>Gandhi</surname> <given-names>Z</given-names>
</name>
<name>
<surname>Perisetti</surname> <given-names>A</given-names>
</name>
<name>
<surname>Ali</surname> <given-names>A</given-names>
</name>
<name>
<surname>Aman Ali</surname> <given-names>K</given-names>
</name>
<etal/>
</person-group>. <article-title>Scope of artificial intelligence in screening and diagnosis of colorectal cancer</article-title>. <source>J Clin Med</source> (<year>2020</year>) <volume>9</volume>:<fpage>3313</fpage>. doi: <pub-id pub-id-type="doi">10.3390/jcm9103313</pub-id>
</citation>
</ref>
<ref id="B15">
<label>15</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Page</surname> <given-names>MJ</given-names>
</name>
<name>
<surname>McKenzie</surname> <given-names>JE</given-names>
</name>
<name>
<surname>Bossuyt</surname> <given-names>PM</given-names>
</name>
<name>
<surname>Boutron</surname> <given-names>I</given-names>
</name>
<name>
<surname>Hoffmann</surname> <given-names>TC</given-names>
</name>
<name>
<surname>Mulrow</surname> <given-names>CD</given-names>
</name>
<etal/>
</person-group>. <article-title>The PRISMA 2020 statement: An updated guideline for reporting systematic reviews</article-title>. <source>J&#xa0;Clin Epidemiol</source> (<year>2021</year>) <volume>134</volume>:<page-range>178&#x2013;89</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jclinepi.2021.03.001</pub-id>
</citation>
</ref>
<ref id="B16">
<label>16</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shademan</surname> <given-names>A</given-names>
</name>
<name>
<surname>Decker</surname> <given-names>RS</given-names>
</name>
<name>
<surname>Opfermann</surname> <given-names>JD</given-names>
</name>
<name>
<surname>Leonard</surname> <given-names>S</given-names>
</name>
<name>
<surname>Krieger</surname> <given-names>A</given-names>
</name>
<name>
<surname>Kim</surname> <given-names>PC</given-names>
</name>
</person-group>. <article-title>Supervised autonomous robotic soft tissue surgery</article-title>. <source>Sci Transl Med</source> (<year>2016</year>) <volume>8</volume>(<issue>337</issue>):<fpage>337ra64</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1126/scitranslmed.aad9398</pub-id>
</citation>
</ref>
<ref id="B17">
<label>17</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Saeidi</surname> <given-names>H</given-names>
</name>
<name>
<surname>Opfermann</surname> <given-names>JD</given-names>
</name>
<name>
<surname>Kam</surname> <given-names>M</given-names>
</name>
<name>
<surname>Wei</surname> <given-names>S</given-names>
</name>
<name>
<surname>Leonard</surname> <given-names>S</given-names>
</name>
<name>
<surname>Hsieh</surname> <given-names>MH</given-names>
</name>
<etal/>
</person-group>. <article-title>Autonomous robotic laparoscopic surgery for intestinal anastomosis</article-title>. <source>Sci Robot</source> (<year>2022</year>) <volume>7</volume>(<issue>62</issue>):<elocation-id>eabj2908</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1126/scirobotics.abj2908</pub-id>
</citation>
</ref>
<ref id="B18">
<label>18</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kitaguchi</surname> <given-names>D</given-names>
</name>
<name>
<surname>Takeshita</surname> <given-names>N</given-names>
</name>
<name>
<surname>Matsuzaki</surname> <given-names>H</given-names>
</name>
<name>
<surname>Oda</surname> <given-names>T</given-names>
</name>
<name>
<surname>Watanabe</surname> <given-names>M</given-names>
</name>
<name>
<surname>Mori</surname> <given-names>K</given-names>
</name>
<etal/>
</person-group>. <article-title>Automated laparoscopic colorectal surgery workflow recognition using artificial intelligence: Experimental research</article-title>. <source>Int J Surg</source> (<year>2020</year>) <volume>79</volume>:<fpage>88</fpage>&#x2013;<lpage>94</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ijsu.2020.05.015</pub-id>
</citation>
</ref>
<ref id="B19">
<label>19</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Igaki</surname> <given-names>T</given-names>
</name>
<name>
<surname>Kitaguchi</surname> <given-names>D</given-names>
</name>
<name>
<surname>Kojima</surname> <given-names>S</given-names>
</name>
<name>
<surname>Hasegawa</surname> <given-names>H</given-names>
</name>
<name>
<surname>Takeshita</surname> <given-names>N</given-names>
</name>
<name>
<surname>Mori</surname> <given-names>K</given-names>
</name>
<etal/>
</person-group>. <article-title>Artificial intelligence-based total mesorectal excision plane navigation in laparoscopic colorectal surgery</article-title>. <source>Dis Colon Rectum</source> (<year>2022</year>) <volume>65</volume>(<issue>5</issue>):<page-range>e329&#x2013;33</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1097/DCR.0000000000002393</pub-id>
</citation>
</ref>
<ref id="B20">
<label>20</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wagner</surname> <given-names>M</given-names>
</name>
<name>
<surname>Bihlmaier</surname> <given-names>A</given-names>
</name>
<name>
<surname>Kenngott</surname> <given-names>HG</given-names>
</name>
<name>
<surname>Mietkowski</surname> <given-names>P</given-names>
</name>
<name>
<surname>Scheikl</surname> <given-names>PM</given-names>
</name>
<name>
<surname>Bodenstedt</surname> <given-names>S</given-names>
</name>
<etal/>
</person-group>. <article-title>A learning robot for cognitive camera control in minimally invasive surgery</article-title>. <source>Surg Endosc</source> (<year>2021</year>) <volume>35</volume>:<page-range>5365&#x2013;74</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00464-021-08509-8</pub-id>
</citation>
</ref>
<ref id="B21">
<label>21</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Park</surname> <given-names>SH</given-names>
</name>
<name>
<surname>Park</surname> <given-names>HM</given-names>
</name>
<name>
<surname>Baek</surname> <given-names>KR</given-names>
</name>
<name>
<surname>Ahn</surname> <given-names>HM</given-names>
</name>
<name>
<surname>Lee</surname> <given-names>IY</given-names>
</name>
<name>
<surname>Son</surname> <given-names>GM</given-names>
</name>
</person-group>. <article-title>Artificial intelligence based real-time microcirculation analysis system for laparoscopic colorectal surgery</article-title>. <source>World J Gastroenterol</source> (<year>2020</year>) <volume>26</volume>:<page-range>6945&#x2013;62</page-range>. doi: <pub-id pub-id-type="doi">10.3748/wjg.v26.i44.6945</pub-id>
</citation>
</ref>
<ref id="B22">
<label>22</label>
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Weede</surname> <given-names>O</given-names>
</name>
<name>
<surname>M&#xf6;nnich</surname> <given-names>H</given-names>
</name>
<name>
<surname>M&#xfc;ller</surname> <given-names>B</given-names>
</name>
<name>
<surname>W&#xf6;rn</surname> <given-names>H</given-names>
</name>
</person-group>. <article-title>An intelligent and autonomous endoscopic guidance system for minimally invasive surgery</article-title>, in: <source>2011 IEEE International Conference on Robotics and Automation</source>. (<year>2011</year>) <volume>2011</volume>:<page-range>5762&#x2013;8</page-range>. doi: <pub-id pub-id-type="doi">10.1109/ICRA.2011.5980216</pub-id>
</citation>
</ref>
<ref id="B23">
<label>23</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jansen-Winkeln</surname> <given-names>B</given-names>
</name>
<name>
<surname>Barberio</surname> <given-names>M</given-names>
</name>
<name>
<surname>Chalopin</surname> <given-names>C</given-names>
</name>
<name>
<surname>Schierle</surname> <given-names>K</given-names>
</name>
<name>
<surname>Diana</surname> <given-names>M</given-names>
</name>
<name>
<surname>K&#xf6;hler</surname> <given-names>H</given-names>
</name>
<etal/>
</person-group>. <article-title>Feedforward artificial neural network-based colorectal cancer detection using hyperspectral imaging: A step towards automatic optical biopsy</article-title>. <source>Cancers (Basel)</source> (<year>2021</year>) <volume>13</volume>(<issue>5</issue>):<elocation-id>967</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/cancers13050967</pub-id>
</citation>
</ref>
<ref id="B24">
<label>24</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Collins</surname> <given-names>T</given-names>
</name>
<name>
<surname>Bencteux</surname> <given-names>V</given-names>
</name>
<name>
<surname>Benedicenti</surname> <given-names>S</given-names>
</name>
<name>
<surname>Moretti</surname> <given-names>V</given-names>
</name>
<name>
<surname>Mita</surname> <given-names>MT</given-names>
</name>
<name>
<surname>Barbieri</surname> <given-names>V</given-names>
</name>
<etal/>
</person-group>. <article-title>Automatic optical biopsy for colorectal cancer using hyperspectral imaging and artificial neural networks</article-title>. <source>Surg Endosc</source> (<year>2022</year>) <volume>36</volume>:<page-range>8549&#x2013;59</page-range>. doi: <pub-id pub-id-type="doi">10.1007/s00464-022-09524-z</pub-id>
</citation>
</ref>
<ref id="B25">
<label>25</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Okamoto</surname> <given-names>N</given-names>
</name>
<name>
<surname>Rodr&#xed;guez-Luna</surname> <given-names>MR</given-names>
</name>
<name>
<surname>Bencteux</surname> <given-names>V</given-names>
</name>
<name>
<surname>Al-Taher</surname> <given-names>M</given-names>
</name>
<name>
<surname>Cinelli</surname> <given-names>L</given-names>
</name>
<name>
<surname>Felli</surname> <given-names>E</given-names>
</name>
<etal/>
</person-group>. <article-title>Computer-assisted differentiation between colon-mesocolon and retroperitoneum using hyperspectral imaging (HSI) technology</article-title>. <source>Diagnostics (Basel)</source> (<year>2022</year>) <volume>12</volume>(<issue>9</issue>):<elocation-id>2225</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/diagnostics12092225</pub-id>
</citation>
</ref>
<ref id="B26">
<label>26</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hussain</surname> <given-names>A</given-names>
</name>
<name>
<surname>Malik</surname> <given-names>A</given-names>
</name>
<name>
<surname>Halim</surname> <given-names>MU</given-names>
</name>
<name>
<surname>Ali</surname> <given-names>AM</given-names>
</name>
</person-group>. <article-title>The use of robotics in surgery: A review</article-title>. <source>Int J Clin Pract</source> (<year>2014</year>) <volume>68</volume>:<page-range>1376&#x2013;82</page-range>. doi: <pub-id pub-id-type="doi">10.1111/ijcp.12492</pub-id>
</citation>
</ref>
<ref id="B27">
<label>27</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Albani</surname> <given-names>JM</given-names>
</name>
</person-group>. <article-title>The role of robotics in surgery: A review</article-title>. <source>Mo Med</source> (<year>2007</year>) <volume>104</volume>:<page-range>166&#x2013;72</page-range>.</citation>
</ref>
<ref id="B28">
<label>28</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hirano</surname> <given-names>Y</given-names>
</name>
<name>
<surname>Kondo</surname> <given-names>H</given-names>
</name>
<name>
<surname>Yamaguchi</surname> <given-names>S</given-names>
</name>
</person-group>. <article-title>Robot-assisted surgery with senhance robotic system for colon cancer: Our original single-incision plus 2-port procedure and a review of the literature</article-title>. <source>Tech Coloproctol</source> (<year>2021</year>) <volume>25</volume>:<fpage>1</fpage>&#x2013;<lpage>5</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s10151-020-02389-1</pub-id>
</citation>
</ref>
<ref id="B29">
<label>29</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kim</surname> <given-names>HJ</given-names>
</name>
<name>
<surname>Choi</surname> <given-names>G-S</given-names>
</name>
<name>
<surname>Park</surname> <given-names>JS</given-names>
</name>
<name>
<surname>Park</surname> <given-names>SY</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>CS</given-names>
</name>
<name>
<surname>Lee</surname> <given-names>HJ</given-names>
</name>
</person-group>. <article-title>The impact of robotic surgery on quality of life, urinary and sexual function following total mesorectal excision for rectal cancer: A propensity score-matched analysis with laparoscopic surgery</article-title>. <source>Colorectal Dis</source> (<year>2018</year>) <volume>20</volume>:<page-range>O103&#x2013;13</page-range>. doi: <pub-id pub-id-type="doi">10.1111/codi.14051</pub-id>
</citation>
</ref>
<ref id="B30">
<label>30</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jayne</surname> <given-names>D</given-names>
</name>
<name>
<surname>Pigazzi</surname> <given-names>A</given-names>
</name>
<name>
<surname>Marshall</surname> <given-names>H</given-names>
</name>
<name>
<surname>Croft</surname> <given-names>J</given-names>
</name>
<name>
<surname>Corrigan</surname> <given-names>N</given-names>
</name>
<name>
<surname>Copeland</surname> <given-names>J</given-names>
</name>
<etal/>
</person-group>. <article-title>Effect of robotic-assisted vs conventional laparoscopic surgery on risk of conversion to open laparotomy among patients undergoing resection for rectal cancer: The ROLARR randomized clinical trial</article-title>. <source>JAMA</source> (<year>2017</year>) <volume>318</volume>:<page-range>1569&#x2013;80</page-range>. doi: <pub-id pub-id-type="doi">10.1001/jama.2017.7219</pub-id>
</citation>
</ref>
<ref id="B31">
<label>31</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yang</surname> <given-names>S-X</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>Z-Q</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>Q-B</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>J-Z</given-names>
</name>
<name>
<surname>Chang</surname> <given-names>Y</given-names>
</name>
<name>
<surname>Xia</surname> <given-names>K-K</given-names>
</name>
<etal/>
</person-group>. <article-title>Security and radical assessment in open, laparoscopic, robotic colorectal cancer surgery: A comparative study</article-title>. <source>Technol Cancer Res Treat</source> (<year>2018</year>) <volume>17</volume>. doi: <pub-id pub-id-type="doi">10.1177/1533033818794160</pub-id>
</citation>
</ref>
<ref id="B32">
<label>32</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mitsala</surname> <given-names>A</given-names>
</name>
<name>
<surname>Tsalikidis</surname> <given-names>C</given-names>
</name>
<name>
<surname>Pitiakoudis</surname> <given-names>M</given-names>
</name>
<name>
<surname>Simopoulos</surname> <given-names>C</given-names>
</name>
<name>
<surname>Tsaroucha</surname> <given-names>AK</given-names>
</name>
</person-group>. <article-title>Artificial intelligence in colorectal cancer screening, diagnosis and treatment. A new era</article-title>. <source>Curr Oncol</source> (<year>2021</year>) <volume>28</volume>(<issue>3</issue>):<page-range>1581&#x2013;607</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/curroncol28030149</pub-id>
</citation>
</ref>
<ref id="B33">
<label>33</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Attanasio</surname> <given-names>A</given-names>
</name>
<name>
<surname>Scaglioni</surname> <given-names>B</given-names>
</name>
<name>
<surname>De Momi</surname> <given-names>E</given-names>
</name>
<name>
<surname>Fiorini</surname> <given-names>P</given-names>
</name>
<name>
<surname>Valdastri</surname> <given-names>P</given-names>
</name>
</person-group>. <article-title>Autonomy in surgical robotics</article-title>. <source>Annu Rev Control Robot Auton Syst</source> (<year>2021</year>) <volume>4</volume>(<issue>1</issue>):<page-range>651&#x2013;79</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1146/annurev-control-062420-090543</pub-id>
</citation>
</ref>
<ref id="B34">
<label>34</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Garrow</surname> <given-names>CR</given-names>
</name>
<name>
<surname>Kowalewski</surname> <given-names>KF</given-names>
</name>
<name>
<surname>Li</surname> <given-names>L</given-names>
</name>
<name>
<surname>Wagner</surname> <given-names>M</given-names>
</name>
<name>
<surname>Schmidt</surname> <given-names>MW</given-names>
</name>
<name>
<surname>Engelhardt</surname> <given-names>S</given-names>
</name>
<etal/>
</person-group>. <article-title>Machine learning for surgical phase recognition: A systematic review</article-title>. <source>Ann Surg</source> (<year>2021</year>) <volume>273</volume>:<page-range>684&#x2013;93</page-range>. doi: <pub-id pub-id-type="doi">10.1097/SLA.0000000000004425</pub-id>
</citation>
</ref>
<ref id="B35">
<label>35</label>
<citation citation-type="web">
<source>Endo control system website</source>. Available at: <uri xlink:href="https://www.endocontrol-medical.com/en/viky-en/">https://www.endocontrol-medical.com/en/viky-en/</uri> (Accessed <access-date>Sept. 10, 2022</access-date>).</citation>
</ref>
<ref id="B36">
<label>36</label>
<citation citation-type="web">
<source>Free hand system website</source>. Available at: <uri xlink:href="https://www.freehandsurgeon.com">https://www.freehandsurgeon.com</uri> (Accessed <access-date>Sept. 10, 2022</access-date>).</citation>
</ref>
<ref id="B37">
<label>37</label>
<citation citation-type="web">
<source>SOLOASSIST system website</source>. Available at: <uri xlink:href="https://aktormed.info/en/products/soloassist-en">https://aktormed.info/en/products/soloassist-en</uri> (Accessed <access-date>Sept. 10, 2022</access-date>).</citation>
</ref>
<ref id="B38">
<label>38</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fiorini</surname> <given-names>P</given-names>
</name>
<name>
<surname>Goldberg</surname> <given-names>KY</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Y</given-names>
</name>
<name>
<surname>Taylor</surname> <given-names>RH</given-names>
</name>
</person-group>. <article-title>Concepts and trends in autonomy for robot-assisted surgery</article-title>. <source>Proc IEEE Inst Electr Electron Eng</source> (<year>2022</year>) <volume>110</volume>(<issue>7</issue>):<fpage>993</fpage>&#x2013;<lpage>1011</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/JPROC.2022.3176828</pub-id>
</citation>
</ref>
<ref id="B39">
<label>39</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Vasey</surname> <given-names>B</given-names>
</name>
<name>
<surname>Nagendran</surname> <given-names>M</given-names>
</name>
<name>
<surname>Campbell</surname> <given-names>B</given-names>
</name>
<name>
<surname>Clifton</surname> <given-names>DA</given-names>
</name>
<name>
<surname>Collins</surname> <given-names>GS</given-names>
</name>
<name>
<surname>Denaxas</surname> <given-names>S</given-names>
</name>
<etal/>
</person-group>. <article-title>Reporting guideline for the early-stage clinical evaluation of decision support systems driven by artificial intelligence: DECIDE-AI</article-title>. <source>Nat Med</source> (<year>2022</year>) <volume>28</volume>:<page-range>924&#x2013;33</page-range>. doi: <pub-id pub-id-type="doi">10.1038/s41591-022-01772-9</pub-id>
</citation>
</ref>
<ref id="B40">
<label>40</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mayer</surname> <given-names>H</given-names>
</name>
<name>
<surname>Gomez</surname> <given-names>F</given-names>
</name>
<name>
<surname>Wierstra</surname> <given-names>D</given-names>
</name>
<name>
<surname>Nagy</surname> <given-names>I</given-names>
</name>
<name>
<surname>Knoll</surname> <given-names>A</given-names>
</name>
<name>
<surname>Schmidhuber</surname> <given-names>J</given-names>
</name>
</person-group>. <article-title>A system for robotic heart surgery that learns to tie knots using recurrent neural networks</article-title>. <source>Adv Robot</source> (<year>2008</year>) <volume>22</volume>(<issue>13-14</issue>):<page-range>1521&#x2013;37</page-range>. doi: <pub-id pub-id-type="doi">10.1163/156855308X360604</pub-id>
</citation>
</ref>
<ref id="B41">
<label>41</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kassahun</surname> <given-names>Y</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>B</given-names>
</name>
<name>
<surname>Tibebu</surname> <given-names>AT</given-names>
</name>
<name>
<surname>Stoyanov</surname> <given-names>D</given-names>
</name>
<name>
<surname>Giannarou</surname> <given-names>S</given-names>
</name>
<name>
<surname>Metzen</surname> <given-names>JH</given-names>
</name>
<etal/>
</person-group>. <article-title>Surgical robotics beyond enhanced dexterity instrumentation: a survey of machine learning techniques and their role in intelligent and autonomous surgical actions</article-title>. <source>Int J CARS</source> (<year>2016</year>) <volume>11</volume>:<page-range>553&#x2013;68</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11548-015-1305-z</pub-id>
</citation>
</ref>
<ref id="B42">
<label>42</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Barberio</surname> <given-names>M</given-names>
</name>
<name>
<surname>Longo</surname> <given-names>F</given-names>
</name>
<name>
<surname>Fiorillo</surname> <given-names>C</given-names>
</name>
<name>
<surname>Seeliger</surname> <given-names>B</given-names>
</name>
<name>
<surname>Mascagni</surname> <given-names>P</given-names>
</name>
<name>
<surname>Agnus</surname> <given-names>V</given-names>
</name>
<etal/>
</person-group>. <article-title>HYPerspectral enhanced reality (HYPER): A physiology-based surgical guidance tool</article-title>. <source>Surg Endosc</source> (<year>2020</year>) <volume>34</volume>:<page-range>1736&#x2013;44</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00464-019-06959-9</pub-id>
</citation>
</ref>
<ref id="B43">
<label>43</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Clancy</surname> <given-names>NT</given-names>
</name>
<name>
<surname>Jones</surname> <given-names>G</given-names>
</name>
<name>
<surname>Maier-Hein</surname> <given-names>L</given-names>
</name>
<name>
<surname>Elson</surname> <given-names>DS</given-names>
</name>
<name>
<surname>Stoyanov</surname> <given-names>D</given-names>
</name>
</person-group>. <article-title>Surgical spectral imaging</article-title>. <source>Med Image Anal</source> (<year>2020</year>) <volume>63</volume>:<elocation-id>101699</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.media.2020.101699</pub-id>
</citation>
</ref>
<ref id="B44">
<label>44</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hamamoto</surname> <given-names>R</given-names>
</name>
<name>
<surname>Suvarna</surname> <given-names>K</given-names>
</name>
<name>
<surname>Yamada</surname> <given-names>M</given-names>
</name>
<name>
<surname>Kobayashi</surname> <given-names>K</given-names>
</name>
<name>
<surname>Shinkai</surname> <given-names>N</given-names>
</name>
<name>
<surname>Miyake</surname> <given-names>M</given-names>
</name>
<etal/>
</person-group>. <article-title>Application of artificial intelligence technology in oncology: Towards the establishment of precision medicine</article-title>. <source>Cancers</source> (<year>2020</year>) <volume>12</volume>:<fpage>3532</fpage>. doi: <pub-id pub-id-type="doi">10.3390/cancers12123532</pub-id>
</citation>
</ref>
<ref id="B45">
<label>45</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mascagni</surname> <given-names>P</given-names>
</name>
<name>
<surname>Alapatt</surname> <given-names>D</given-names>
</name>
<name>
<surname>Garcia</surname> <given-names>A</given-names>
</name>
<name>
<surname>Okamoto</surname> <given-names>N</given-names>
</name>
<name>
<surname>Vardazaryan</surname> <given-names>A</given-names>
</name>
<name>
<surname>Costamagna</surname> <given-names>G</given-names>
</name>
<etal/>
</person-group>. <article-title>Surgical data science for safe cholecystectomy: A protocol for segmentation of hepatocystic anatomy and assessment of the critical view of safety</article-title>. <source>arXiv</source> (<year>2021</year>) <volume>arXiv:2106</volume>:<fpage>10916</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.48550/ARXIV.2106.10916</pub-id>
</citation>
</ref>
<ref id="B46">
<label>46</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ward</surname> <given-names>TM</given-names>
</name>
<name>
<surname>Mascagni</surname> <given-names>P</given-names>
</name>
<name>
<surname>Madani</surname> <given-names>A</given-names>
</name>
<name>
<surname>Padoy</surname> <given-names>N</given-names>
</name>
<name>
<surname>Perretta</surname> <given-names>S</given-names>
</name>
<name>
<surname>Hashimoto</surname> <given-names>DA</given-names>
</name>
</person-group>. <article-title>Surgical data science and artificial intelligence for surgical education</article-title>. <source>J Surg Oncol</source> (<year>2021</year>) <volume>124</volume>:<page-range>221&#x2013;30</page-range>. doi: <pub-id pub-id-type="doi">10.1002/jso.26496</pub-id>
</citation>
</ref>
<ref id="B47">
<label>47</label>
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Quero</surname> <given-names>G</given-names>
</name>
<name>
<surname>Mascagni</surname> <given-names>P</given-names>
</name>
<name>
<surname>Kolbinger</surname> <given-names>FR</given-names>
</name>
<name>
<surname>Fiorillo</surname> <given-names>C</given-names>
</name>
<name>
<surname>De Sio</surname> <given-names>D</given-names>
</name>
<name>
<surname>Longo</surname> <given-names>F</given-names>
</name>
<etal/>
</person-group>. <article-title>Artificial intelligence in colorectal cancer surgery: Present and future perspectives</article-title>. <source>Cancers (Basel)</source> (<year>2022</year>) <volume>14</volume>(<issue>15</issue>):<elocation-id>3803</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/cancers14153803</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>