<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" 'JATS-journalpublishing1-3-mathml3.dtd'>
<article article-type="editorial" dtd-version="1.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Robot. AI</journal-id>
<journal-title-group>
<journal-title>Frontiers in Robotics and AI</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Robot. AI</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">2296-9144</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1740881</article-id>
<article-id pub-id-type="doi">10.3389/frobt.2025.1740881</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Editorial</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Editorial: Autonomous robotic systems in aquaculture: research challenges and industry needs</article-title>
<alt-title alt-title-type="left-running-head">Kelasidi et al.</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1740881">10.3389/frobt.2025.1740881</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Kelasidi</surname>
<given-names>Eleni</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="https://loop.frontiersin.org/people/637360"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing - original draft</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Triantafyllou</surname>
<given-names>Michael</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/301949"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Ohrem</surname>
<given-names>Sveinung Johan</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/966070"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x26; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing - review and editing</role>
</contrib>
</contrib-group>
<aff id="aff1">
<label>1</label>
<institution>Department of Mechanical and Industrial Engineering, NTNU</institution>, <city>Trondheim</city>, <country country="NO">Norway</country>
</aff>
<aff id="aff2">
<label>2</label>
<institution>Department of Aquaculture Technology, SINTEF Ocean</institution>, <city>Trondheim</city>, <country country="NO">Norway</country>
</aff>
<aff id="aff3">
<label>3</label>
<institution>Department of Mechanical Engineering, MIT</institution>, <city>Cambridge</city>, <state>MA</state>, <country country="US">United States</country>
</aff>
<author-notes>
<corresp id="c001">
<label>&#x2a;</label>Correspondence: Eleni Kelasidi, <email xlink:href="mailto:eleni.kelasidi@ntnu.no">eleni.kelasidi@ntnu.no</email>
</corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2025-11-21">
<day>21</day>
<month>11</month>
<year>2025</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2025</year>
</pub-date>
<volume>12</volume>
<elocation-id>1740881</elocation-id>
<history>
<date date-type="received">
<day>06</day>
<month>11</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>11</day>
<month>11</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2025 Kelasidi, Triantafyllou and Ohrem.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Kelasidi, Triantafyllou and Ohrem</copyright-holder>
<license>
<ali:license_ref start_date="2025-11-21">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<kwd-group>
<kwd>aquaculture</kwd>
<kwd>robotics</kwd>
<kwd>fish-machine interaction</kwd>
<kwd>welfare</kwd>
<kwd>machine vision and AI methods</kwd>
</kwd-group>
<funding-group>
<award-group id="gs1">
<funding-source id="sp1">
<institution-wrap>
<institution>Norges Forskningsr&#xe5;d</institution>
<institution-id institution-id-type="doi" vocab="open-funder-registry" vocab-identifier="10.13039/open_funder_registry">10.13039/501100005416</institution-id>
</institution-wrap>
</funding-source>
<award-id rid="sp1">327292</award-id>
<award-id rid="sp1">313737</award-id>
</award-group>
<funding-statement>The authors declare that financial support was received for the research and/or publication of this article. This work was supported by the Research Council of Norway (ResiFarm: NO-327292, CHANGE: NO-313737).</funding-statement>
</funding-group>
<counts>
<fig-count count="0"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="0"/>
<page-count count="3"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-in-acceptance</meta-name>
<meta-value>Field Robotics</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
<notes notes-type="frontiers-research-topic">
<p>Editorial on the Research Topic <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/research-topics/67133">Autonomous robotic systems in aquaculture: research challenges and industry needs</ext-link>
</p>
</notes>
</front>
<body>
<sec sec-type="intro" id="s1">
<title>Introduction</title>
<p>This editorial summarizes the contributions to the Research Topic &#x201c;Autonomous Robotic Systems in Aquaculture: Research Challenges and Industry Needs&#x201d;, appearing in the Frontiers in Robotics and AI journal.</p>
<p>As the global population increases, the demand for sustainable and efficient food production continues to grow. Aquaculture, the fastest-expanding food sector, now provides a significant portion of global seafood. Despite its importance, it remains one of the most demanding and risk-prone industries, with challenges in Health, Safety, and Environment (HSE), dependence on manual labor, and reliance on human experience. To ensure its sustainable growth, aquaculture must adopt robotics, automation, and artificial intelligence. This Research Topic in Frontiers in Robotics and AI presents advances demonstrating how autonomous robotic systems are reshaping aquaculture by improving safety, efficiency, and animal welfare, guiding the industry toward digital and sustainable operations.</p>
</sec>
<sec id="s2">
<title>Robotics and intelligent sensing in aquaculture</title>
<p>The contributions span the aquaculture value chain, addressing robotic design, underwater sensing, navigation, fish&#x2013;robot interaction, and welfare monitoring. Central to these efforts is developing intelligent systems that operate reliably in complex and dynamic marine environments. By merging multi-sensor perception, machine vision, and adaptive control, these technologies are redefining farm management and advancing the vision of precision aquaculture.</p>
</sec>
<sec id="s3">
<title>Experimental platforms and behavioral research</title>
<p>A key innovation is the Cyber-Enhanced Tank (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1629884">Voskakis et al.</ext-link>), a sophisticated experimental platform combining event cameras, imaging sonars, and optical systems to simulate real aquaculture environments. This system enables non-invasive monitoring of fish behavior under controlled yet realistic conditions. It bridges laboratory research with industrial practice, supporting the design and validation of new robotic and sensing technologies essential for welfare-conscious automation.</p>
</sec>
<sec id="s4">
<title>Deep learning and underwater perception</title>
<p>Autonomous operation underwater faces challenges such as low visibility and dynamic conditions. Advances in computer vision and deep learning have enabled robust 3D tracking of fish and environmental features (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1628213">F&#xf8;re et al.</ext-link>). Neural models integrated within the Robot Operating System (ROS2) can estimate fish&#x2013;vehicle distance and behavioral response in real time, allowing robots to adapt behaviorally and minimize stress. This development is a step toward ethical, biologically aware automation in aquaculture.</p>
</sec>
<sec id="s5">
<title>Autonomous inspection and maintenance</title>
<p>Inspection and maintenance of underwater infrastructure are vital yet hazardous and costly when performed manually. A vision-based inspection system implemented on the Blueye X3 drone uses real-time image enhancement, AI-based object detection, and visual servoing for autonomous operation (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1655242">Nguyen et al.</ext-link>). Built on open-source frameworks, it lowers costs while improving safety and data quality. Such technologies exemplify how scalable, affordable robotics can benefit aquaculture and offshore industries alike.</p>
</sec>
<sec id="s6">
<title>Fish welfare and environmental sensitivity</title>
<p>Sustainability in aquaculture depends on animal welfare. Studies on Atlantic salmon have shown that certain acoustic frequencies&#x2014;especially near 400 Hz&#x2014;trigger avoidance behavior, while light intensity and depth strongly influence stress levels (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1657567">Zhang et al.</ext-link>). These findings inform robotic system design, ensuring propulsion noise, light, and sensor emissions are configured to reduce stress and maintain welfare standards.</p>
</sec>
<sec id="s7">
<title>Mapping, localization, and digital twins</title>
<p>Accurate mapping and localization are fundamental to robotic autonomy. Combining monocular depth prediction with acoustic sensing allows unmanned underwater vehicles (UUVs) to estimate positions and create detailed 3D maps in real time (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1609765">Job et al.</ext-link>). These capabilities enable precise inspection, continuous monitoring, and digital twin creation of aquaculture sites, paving the way for intelligent and integrated farm management systems.</p>
</sec>
<sec id="s8">
<title>Artificial intelligence in ecosystem monitoring</title>
<p>Beyond aquaculture production, AI is also advancing environmental monitoring. The ODYSSEE model, designed for oyster identification, was compared with human experts and non-experts (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1587033">Campbell et al.</ext-link>). Although AI achieved faster results with slightly lower accuracy, the study underscores its potential for non-invasive biodiversity assessment and habitat conservation. Improved datasets and annotation methods will enhance such applications, enabling scalable, AI-driven ecosystem monitoring.</p>
</sec>
<sec id="s9">
<title>Behavioral analytics for welfare and productivity</title>
<p>Computer vision enables real-time monitoring of fish behavior in sea cages. By analyzing correlations between fish activity, depth distribution, and environmental conditions such as temperature, wave motion, and light, researchers identified behavioral adaptations that reduce exposure to stressors (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frobt.2025.1574161">Burke et al.</ext-link>). Continuous monitoring supports adaptive management, offering a foundation for welfare-based automation that improves both productivity and fish health.</p>
</sec>
<sec id="s10">
<title>Toward an intelligent and sustainable aquaculture future</title>
<p>Collectively, these studies illustrate how robotics, AI, and sensing technologies are converging to transform aquaculture into a data-driven, efficient, and ethically grounded industry. The combination of perception, control, and autonomy with welfare-focused design represents a major step toward Industry 4.0 for the marine domain.</p>
<p>The innovations summarized here underscore the essential role of technology in ensuring a sustainable future for food production from the sea. From intelligent sensing and mapping systems to welfare-aware robots and digital twins, these developments enable precision aquaculture that balances productivity with environmental responsibility. As the need for sustainable protein grows, robotics and AI are not merely technological tools&#x2014;they are vital to ensuring that aquaculture continues to feed the world reliably and responsibly.</p>
</sec>
</body>
<back>
<sec sec-type="author-contributions" id="s11">
<title>Author contributions</title>
<p>EK: Writing &#x2013; review and editing, Writing &#x2013; original draft. MT: Writing &#x2013; review and editing. SO: Writing &#x2013; review and editing.</p>
</sec>
<ack>
<title>Acknowledgements</title>
<p>We express our sincere appreciation to the authors of the papers included in this Research Topic for their valuable scientific contributions and to the referees for their thorough and constructive reviews.</p>
</ack>
<sec sec-type="COI-statement" id="s13">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="ai-statement" id="s14">
<title>Generative AI statement</title>
<p>The authors declare that no Generative AI was used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec sec-type="disclaimer" id="s15">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<fn-group>
<fn fn-type="custom" custom-type="reviewed-by">
<p>
<bold>Edited and reviewed by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/165967/overview">Dongbing Gu</ext-link>, University of Essex, United Kingdom</p>
</fn>
</fn-group>
</back>
</article>