<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="review-article" dtd-version="1.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Oncol.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Oncology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Oncol.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">2234-943X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fonc.2025.1730628</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Review</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Deep learning in renal ultrasound: applications, challenges, and future outlook</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Zhang</surname><given-names>Yong</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2843821/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Hou</surname><given-names>Yao</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3326855/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Qiu</surname><given-names>Tingting</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3326847/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Zhuang</surname><given-names>Yan</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3326827/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Chen</surname><given-names>Ke</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3326818/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Ling</surname><given-names>Wenwu</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3326813/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Luo</surname><given-names>Yan</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2976887/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Lin</surname><given-names>Jiangli</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>*</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3326798/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>College of Biomedical Engineering, Sichuan University</institution>, <city>Chengdu</city>,&#xa0;<country country="CN">China</country></aff>
<aff id="aff2"><label>2</label><institution>Department of Medical Ultrasound, West China Hospital, Sichuan University</institution>, <city>Chengdu</city>,&#xa0;<country country="CN">China</country></aff>
<author-notes>
<corresp id="c001"><label>*</label>Correspondence: Jiangli Lin, <email xlink:href="mailto:linjiangli@scu.edu.cn">linjiangli@scu.edu.cn</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-01-12">
<day>12</day>
<month>01</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2025</year>
</pub-date>
<volume>15</volume>
<elocation-id>1730628</elocation-id>
<history>
<date date-type="received">
<day>23</day>
<month>10</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>18</day>
<month>12</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>24</day>
<month>11</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2026 Zhang, Hou, Qiu, Zhuang, Chen, Ling, Luo and Lin.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Zhang, Hou, Qiu, Zhuang, Chen, Ling, Luo and Lin</copyright-holder>
<license>
<ali:license_ref start_date="2026-01-12">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>Kidney disease poses a significant global health burden, often progressing to end-stage renal disease with serious complications. Renal ultrasound, which is real-time, accessible, and noninvasive, serves as a primary imaging tool for evaluating renal structure and pathology. However, its diagnostic accuracy is limited by interobserver variability. Artificial intelligence (AI), particularly deep learning (DL), offers a promising solution for enhancing objectivity and automation throughout the renal ultrasound workflow. This review systematically summarizes DL applications across key tasks&#x2014;including kidney segmentation, volume measurement, functional prediction, and disease diagnosis&#x2014;and evaluates the performance of models such as CNNs and transformers. The results indicate that DL has significantly improved the accuracy and efficiency of kidney disease analysis, including chronic kidney disease (CKD), but challenges remain in terms of data quality, model interpretability, generalization, and clinical integration. In the future, the combination of DL with multimodal data, large model technology, federated learning and interpretable artificial intelligence will be essential to achieve intelligence, standardization and personalization of renal ultrasound.</p>
</abstract>
<kwd-group>
<kwd>renal ultrasound</kwd>
<kwd>deep learning</kwd>
<kwd>chronic kidney disease (CKD)</kwd>
<kwd>multimodal data</kwd>
<kwd>large model technology</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. This research was supported by Sichuan Province Science and Technology Support Program 2025ZNSFSC1760.</funding-statement>
</funding-group>
<counts>
<fig-count count="2"/>
<table-count count="4"/>
<equation-count count="0"/>
<ref-count count="148"/>
<page-count count="14"/>
<word-count count="7194"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Genitourinary Oncology</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<title>Introduction</title>
<p>Renal diseases have become a major challenge in global public health (<xref ref-type="bibr" rid="B1">1</xref>). CKD affects more than 850 million people worldwide and is one of the leading causes of death (<xref ref-type="bibr" rid="B2">2</xref>). Owing to its advantages of noninvasiveness, real-time imaging, and economy, ultrasound has become the core imaging method for diagnosing and treating renal diseases. As a primary diagnostic method, it can clearly display the structure of the kidneys (size, shape, and cortical thickness) and the state of the collecting system and has high sensitivity for detecting structural abnormalities such as hydronephrosis and renal stones (<xref ref-type="bibr" rid="B3">3</xref>&#x2013;<xref ref-type="bibr" rid="B5">5</xref>). Doppler technology can also assess renal vascular hemodynamics and assist in diagnosing functional lesions (<xref ref-type="bibr" rid="B6">6</xref>&#x2013;<xref ref-type="bibr" rid="B8">8</xref>). In terms of disease detection, ultrasound can detect various renal lesions, such as congenital abnormalities, stones, cysts, and tumorous lesions (<xref ref-type="bibr" rid="B9">9</xref>). In kidney transplant patients, ultrasound is indispensable for evaluating the function and vascular complications of the transplanted kidney (<xref ref-type="bibr" rid="B10">10</xref>, <xref ref-type="bibr" rid="B11">11</xref>). New technologies such as ultrasound elastography can also be used to assess the degree of renal fibrosis quantitatively (<xref ref-type="bibr" rid="B12">12</xref>). 
However, traditional ultrasound has significant limitations: (1) it is highly dependent on the operator, and different physicians have low consistency in judging small tumors; (2) it relies mainly on qualitative assessment and lacks objective quantitative indicators such as elastic parameters; and (3) the diagnostic efficiency for complex cases is limited (<xref ref-type="bibr" rid="B13">13</xref>, <xref ref-type="bibr" rid="B14">14</xref>).</p>
<p>In recent years, AI technology has provided a revolutionary solution to overcome the limitations of traditional diagnosis (<xref ref-type="bibr" rid="B15">15</xref>, <xref ref-type="bibr" rid="B16">16</xref>). In the field of renal ultrasound, AI has evolved from traditional machine learning to deep learning, significantly improving the accuracy and efficiency of image analysis. Convolutional neural networks (CNNs) excel in local feature extraction and perform well in kidney image classification and segmentation (<xref ref-type="bibr" rid="B17">17</xref>, <xref ref-type="bibr" rid="B18">18</xref>). ResNet solves the problem of gradient disappearance in deep networks through residual connections, improving the accuracy of identifying complex renal boundaries (<xref ref-type="bibr" rid="B19">19</xref>). The self-attention mechanism of the transformer model can capture global feature correlations, helping to analyze the spatial relationship between the kidney and surrounding tissues (<xref ref-type="bibr" rid="B20">20</xref>). With the optimization of deep learning algorithms, renal ultrasound diagnosis is shifting from an experience-dependent mode to an intelligent and standardized mode, providing a new path for improving the effectiveness of renal disease diagnosis and treatment (<xref ref-type="bibr" rid="B21">21</xref>, <xref ref-type="bibr" rid="B22">22</xref>). In recent years, several reviews on AI in the diagnosis and treatment of kidney diseases have been published. For example, De et&#xa0;al. (<xref ref-type="bibr" rid="B15">15</xref>) outlined the potential of deep learning in renal ultrasound from the perspective of a technical basis and clinical application, but their discussions are mostly limited to algorithm performance and lack systematic construction of clinical transformation pathways. Although Xu et&#xa0;al. 
(<xref ref-type="bibr" rid="B16">16</xref>) further explored the multitask application of AI in renal ultrasound, their research focused mainly on technical reviews, and no specific technology integration scheme or expansion center collaboration mechanism was proposed. This article not only focuses on the full-chain application of AI in renal ultrasound (from image acquisition to clinical decision support) but also systematically summarizes its application status and frontier progress in kidney segmentation, functional assessment, disease diagnosis and other aspects. It also proposes the construction of a structured framework for clinical transformation for the first time and deeply discusses frontier directions such as multimodal fusion, federated learning and large models. This study provides a systematic reference for promoting the standardized application and precise decision-making of AI technology in renal ultrasound diagnosis. The main framework of this article is shown in <xref ref-type="fig" rid="f1"><bold>Figure&#xa0;1</bold></xref>.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>The main framework of this article.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fonc-15-1730628-g001.tif">
<alt-text content-type="machine-generated">Diagram outlining deep learning in renal ultrasound, showing applications, challenges, and future outlook. Applications include segmentation, volume measurement, function prediction, and disease diagnosis. Challenges are data-related, technology-related, and clinical integration issues. Future outlook involves integrating emerging technologies, intelligent diagnosis systems, and multidisciplinary cooperation.</alt-text>
</graphic></fig>
</sec>
<sec id="s2">
<title>Methodology</title>
<p>This study retrieved all the articles from the PubMed and Google Scholar databases up to July 30, 2025. The search terms used were &#x201c;artificial intelligence&#x201d;, &#x201c;ultrasound&#x201d;, &#x201c;kidney&#x201d;, &#x201c;renal&#x201d;, and related terms. The literature screening process is shown in <xref ref-type="fig" rid="f2"><bold>Figure&#xa0;2</bold></xref>. We followed the PRISMA guidelines and initially retrieved 426 articles from the PubMed and Google Scholar databases. After removing duplicates, 280 articles were retained for the screening stage. In the first round of screening on the basis of titles/abstracts, 117 articles were excluded for not meeting the criteria (exclusion reasons: articles that did not use the DL method/retracted articles). The remaining 163 articles were included in the full-text review stage. Following a detailed evaluation, 65 articles were excluded (exclusion reasons: non-English literature/conference abstracts that did not provide complete data, reviews, etc./animal experiments with nonhuman subjects). Ultimately, 98 articles were included in this review.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Selection criteria.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fonc-15-1730628-g002.tif">
<alt-text content-type="machine-generated">Flowchart of a systematic review process with four stages: Identification, Screening, Eligibility, Inclusion. Starts with 426 publications from PubMed and Google Scholar. After removing duplicates, 280 remain. Title/abstract screening excluded 117 for not being DL methods or retracted, leaving 163 for full-text review. Finally, 65 were excluded for reasons like non-English documents and insufficient data, resulting in 98 included publications.</alt-text>
</graphic></fig>
</sec>
<sec id="s3">
<title>Technical foundation</title>
<p>The technical foundation of AI in the field of renal ultrasound relies mainly on core algorithm paradigms such as supervised learning, unsupervised learning, transfer learning, and multitask learning, which jointly drive a series of breakthroughs from image processing to advanced cognitive tasks (<xref ref-type="bibr" rid="B23">23</xref>, <xref ref-type="bibr" rid="B24">24</xref>). Currently, supervised learning is the mainstream method, whose core lies in the use of expert-labeled data to train models to learn the mapping relationships between inputs and outputs (<xref ref-type="bibr" rid="B25">25</xref>, <xref ref-type="bibr" rid="B26">26</xref>). In renal ultrasound image analysis, the following types of deep neural network architectures play a key role:</p>
<p>In 2015, the proposal of U-Net created an encoder-decoder architecture for medical image segmentation, which laid the foundation for kidney ultrasound segmentation. The subsequent variant (UNet++/nnU-Net) became the benchmark model for kidney segmentation. The outstanding local feature extraction ability of the convolutional neural network (CNN) significantly promotes the classification and segmentation performance of renal ultrasound images (<xref ref-type="bibr" rid="B27">27</xref>, <xref ref-type="bibr" rid="B28">28</xref>). Studies have shown that CNNs perform well in tasks such as automatic segmentation and volume measurement of kidneys, especially in the automatic identification of hydronephrosis, where models based on CNNs can accurately capture the characteristic manifestations in images and effectively improve diagnostic consistency (<xref ref-type="bibr" rid="B29">29</xref>). ResNet alleviates the problem of gradient disappearance in deep networks by introducing residual connections, improving the recognition accuracy of complex anatomical structures (<xref ref-type="bibr" rid="B30">30</xref>). In renal ultrasound, structures such as ResNet50 have been proven to be able to more clearly identify renal boundaries and internal structures, indicating high value for CKD staging and renal tumor differentiation. DenseNet promotes feature reuse through dense connections, showing significant advantages in the case of limited training samples. In response to the characteristics of ultrasound images, attention U-Net introduces an attention mechanism to highlight key areas; U-Net++ improves the segmentation ability of small targets through nested skip connections (<xref ref-type="bibr" rid="B31">31</xref>); and nnU-Net becomes the benchmark model for multicenter research through an automatic configuration strategy (<xref ref-type="bibr" rid="B32">32</xref>). In 2017, the transformer model was born. 
Its self-attention mechanism solves the pain point of insufficient global feature capture by the CNN, which can effectively capture the global context relationship and significantly improve the modeling ability of the spatial relationship between the kidney and the surrounding tissues, promoting the upgrade of diagnosis from &#x201c;local feature dependence&#x201d; to &#x201c;global relationship modeling&#x201d; (<xref ref-type="bibr" rid="B20">20</xref>).</p>
<p>Unsupervised learning has unique value in scenarios with scarce labeled data in renal ultrasound. This method effectively expands the boundaries of supervised learning by mining the intrinsic patterns in unlabeled data. The main technical paths include 1) clustering analysis, which can be used to identify tissue characteristics of the renal cortex, medulla, etc., or distinguish different stages of chronic renal disease populations; 2) an autoencoder (AE), through an encoding-decoding structure to learn compact representations, whose derivative models, such as a denoising autoencoder (DAE), can improve image quality, and a variational autoencoder (VAE) can generate synthetic samples that conform to the real distribution to expand data; and 3) a generative adversarial network (GAN), through the game mechanism of the generator and discriminator, can synthesize realistic pathological images, alleviating the problem of insufficient samples (<xref ref-type="bibr" rid="B33">33</xref>). By 2020, cross-modal fusion technology had matured, and generative models such as CycleGAN had realized domain conversion from CT to ultrasound, overcoming the bottleneck of the scarcity of renal ultrasound labeling data and improving the segmentation accuracy in small sample scenes. These methods perform well in tasks such as feature learning, anomaly detection, and domain adaptation (<xref ref-type="bibr" rid="B34">34</xref>).</p>
<p>Transfer learning is another important way to alleviate the shortage of labeled data (<xref ref-type="bibr" rid="B35">35</xref>). Its core idea is to transfer the general features pretrained on large-scale source domains to the renal ultrasound task. Common strategies include the following: 1) feature extraction: fix the convolutional layer weights of the pretrained model as the feature extractor; and 2) fine-tuning: partially unlock the network layers and use the target data for iterative optimization. Practice has shown that this method can significantly reduce the dependence on the annotation scale, accelerate convergence, and improve the generalization performance. For example, by leveraging cross-modal transfer learning methods, accurate segmentation of renal ultrasound images can be achieved even under limited annotation conditions (<xref ref-type="bibr" rid="B36">36</xref>). Moreover, domain adaptation techniques further enhance the model&#x2019;s robustness across different devices or centers by aligning the distributions between the source domain (annotated data) and the target domain (unannotated clinical data).</p>
<p>Multitask learning (MTL) efficiently utilizes annotation information by sharing underlying features and simultaneously optimizing multiple related tasks (such as segmentation, classification and volume measurement) (<xref ref-type="bibr" rid="B37">37</xref>). In renal ultrasound, MTL has three advantages: first, it improves overall performance by leveraging task correlations (such as using segmentation to assist volume estimation); second, it enhances generalization ability through knowledge transfer; and third, it reduces resource consumption for multi-model deployment. Since 2023, there has been deep integration of large models and multimodal data. Through the combination of pretrained large models and cross-modal alignment technology, deep integration of ultrasound, CT, and genomic data has been achieved, which has improved the diagnostic accuracy of rare kidney disease and promoted the technology to expand from &#x201c;single-task optimization&#x201d; to &#x201c;full-cycle kidney disease management&#x201d; (<xref ref-type="bibr" rid="B34">34</xref>&#x2013;<xref ref-type="bibr" rid="B36">36</xref>).</p>
<p>The three core algorithmic classes (CNNs vs. transformers vs. multimodal fusion) in the field of renal ultrasound DL differ significantly in terms of their technical characteristics, applicable scenarios and performance. The strengths, limitations and typical clinical suitability of these methods are shown in <xref ref-type="table" rid="T1"><bold>Table&#xa0;1</bold></xref>.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Strengths, limitations and typical clinical suitability of the three core algorithmic classes.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Algorithms</th>
<th valign="top" align="left">Strengths</th>
<th valign="top" align="left">Limitations</th>
<th valign="top" align="left">Typical clinical suitability</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">CNNs</td>
<td valign="top" align="left">Strong local feature extraction ability, low training cost and high robustness</td>
<td valign="top" align="left">Global feature capture is insufficient and the effect of weak boundary segmentation is limited</td>
<td valign="top" align="left">Renal segmentation and volume measurement, lesion localization</td>
</tr>
<tr>
<td valign="top" align="left">Transformers</td>
<td valign="top" align="left">Self-attention mechanisms capture global spatial relationships and long-distance dependence</td>
<td valign="top" align="left">High computational complexity, sensitive to small samples, requires a large amount of pretraining data</td>
<td valign="top" align="left">Diagnosis of complex lesions, multiview image fusion</td>
</tr>
<tr>
<td valign="top" align="left">Multimodal fusion</td>
<td valign="top" align="left">Integrating complementary information to improve the comprehensiveness and accuracy of diagnosis</td>
<td valign="top" align="left">Difficult to standardize the data, and the design of the fusion mechanism is complex</td>
<td valign="top" align="left">Prediction of renal function and diagnosis of rare nephropathy</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s4">
<title>The application of AI technology in renal ultrasound</title>
<sec id="s4_1">
<title>Renal segmentation and volume measurement</title>
<p>Renal ultrasound image segmentation and its derived volume measurement technology together constitute the core technical system of AI analysis of kidney diseases, and they show a close basic and extended relationship in clinical application (<xref ref-type="bibr" rid="B37">37</xref>&#x2013;<xref ref-type="bibr" rid="B41">41</xref>). Segmentation is the premise of accurate measurement, and its accuracy directly determines the reliability of volume quantification, morphological evaluation and lesion localization (<xref ref-type="bibr" rid="B42">42</xref>&#x2013;<xref ref-type="bibr" rid="B44">44</xref>). In turn, the clinical demand of volume measurement promotes the segmentation algorithm to be accurate and scenario iterative, forming a complete chain from morphological recognition to functional evaluation (<xref ref-type="bibr" rid="B45">45</xref>). Accurate segmentation and measurement have multiple synergistic meanings. First, three-dimensional reconstruction of the renal cortex, medulla and collecting system can be achieved by automatic segmentation, which can accurately calculate the total renal volume and cortical thickness and provide key quantitative indicators for the assessment of CKD progression and monitoring of renal allograft function. Studies have shown that renal cortical volume is significantly positively correlated with the glomerular filtration rate and that progressive volume reduction is an independent predictor of CKD progression (<xref ref-type="bibr" rid="B46">46</xref>). Second, the volume ratio of the renal pelvis to the renal parenchyma obtained by segmentation can provide an important basis for the timing of surgical intervention in patients with hydronephrosis, and the ratio is significantly related to the degree of renal function injury (<xref ref-type="bibr" rid="B47">47</xref>). 
Third, in clinical operations such as tumor ablation planning, accurate segmentation is the cornerstone of lesion localization and classification, whereas monitoring dynamic volume changes can indicate graft rejection or dysfunction early (<xref ref-type="bibr" rid="B48">48</xref>, <xref ref-type="bibr" rid="B49">49</xref>).</p>
<p>Traditional methods (such as the threshold method, region growing method and active contour model) are limited by problems such as uneven gray levels in ultrasound images, noise interference and weak boundaries, which make it difficult to meet the requirements of boundary accuracy for volume measurement (<xref ref-type="bibr" rid="B50">50</xref>, <xref ref-type="bibr" rid="B51">51</xref>). In recent years, deep learning technology has promoted the breakthrough progress of both methods. <xref ref-type="table" rid="T2"><bold>Table&#xa0;2</bold></xref> lists several representative studies on renal segmentation and volume measurements. In terms of segmentation algorithm innovation, Song et&#xa0;al. (<xref ref-type="bibr" rid="B52">52</xref>) and Guo et&#xa0;al. (<xref ref-type="bibr" rid="B53">53</xref>) used cross-modal data enhancement techniques (CycleGAN, CUT network) to effectively alleviate the problem of scarcity of labeled data through domain conversion from CT to ultrasound. For low-resolution images, Khan et&#xa0;al. (<xref ref-type="bibr" rid="B54">54</xref>) proposed MLAU-Net, which introduces a deep supervised attention mechanism and hybrid loss function to achieve a 90.21% Dice coefficient on low-resolution renal ultrasound images. Alex et&#xa0;al. (<xref ref-type="bibr" rid="B55">55</xref>) designed the boundary feature enhancing network YSegNet combined with a long and short jump connection mechanism, and the Dice coefficient still reached 97% under the weak boundary challenge. Chen et&#xa0;al. (<xref ref-type="bibr" rid="B56">56</xref>) designed a multiscale feature fusion architecture (MSIP and MOS) to aggregate renal features of different scales, and the Dice index was 95.86%. Nipuna et&#xa0;al. (<xref ref-type="bibr" rid="B57">57</xref>) used 3D and multimodal fusion technology (3D U-Net fusion B-mode and power Doppler data) to achieve high-precision volume segmentation of the fetal kidney. 
Innovations in these segmentation techniques directly enable volumetric measurement applications. Jaidip M et&#xa0;al. (<xref ref-type="bibr" rid="B59">59</xref>) reported that the segmentation results of 3D ultrasound automatic measurements of ADPKD patients based on 2D U-Net and transfer learning were highly consistent with those of MRI (Dice=80%). The fast-unet++ proposed by Oghli et&#xa0;al. (<xref ref-type="bibr" rid="B60">60</xref>) can achieve high-precision segmentation (DSC&gt;95%) of the sagittal and transverse planes and simultaneously predict multidimensional parameters such as renal length, width, thickness and volume. Kim et&#xa0;al. (<xref ref-type="bibr" rid="B61">61</xref>) developed a hybrid learning method of U-Net and an active contour model for the automatic calculation of renal volume in children, which was highly correlated with the CT measurement results (ICC = 0.925). Esser et&#xa0;al. (<xref ref-type="bibr" rid="B62">62</xref>) verified good interobserver agreement (ICC 0.83&#x2013;0.94) via semiautomatic 3D ultrasound segmentation in the assessment of pediatric hydronephrosis.</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Application of DL technology in renal segmentation and volume measurements.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="center">Author, Ref</th>
<th valign="top" align="center">Year</th>
<th valign="top" align="center">Algorithms</th>
<th valign="top" align="center">Data source</th>
<th valign="top" align="center">Size</th>
<th valign="top" align="center">Goals/Approach</th>
<th valign="top" align="center">Results</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Yuxin Song et&#xa0;al. (<xref ref-type="bibr" rid="B52">52</xref>)</td>
<td valign="top" align="left">2022</td>
<td valign="top" align="left">CycleGAN</td>
<td valign="top" align="left">Single center, Public Dataset</td>
<td valign="top" align="left">391 images, 210 volumes</td>
<td valign="top" align="left">Cross-modal transfer learning</td>
<td valign="top" align="left">DSC=85.3%</td>
</tr>
<tr>
<td valign="top" align="left">Shuaizi Guo et&#xa0;al. (<xref ref-type="bibr" rid="B53">53</xref>)</td>
<td valign="top" align="left">2024</td>
<td valign="top" align="left">Seg-CycleGAN U-Net</td>
<td valign="top" align="left">Single center, Public Dataset</td>
<td valign="top" align="left">4883 images,<break/>210 volumes</td>
<td valign="top" align="left">Improved model of CycleGAN</td>
<td valign="top" align="left">DSC=85.48%</td>
</tr>
<tr>
<td valign="top" align="left">Rashid Khan et&#xa0;al. (<xref ref-type="bibr" rid="B54">54</xref>)</td>
<td valign="top" align="left">2024</td>
<td valign="top" align="left">MLAU-Net</td>
<td valign="top" align="left">Single center, Public Dataset</td>
<td valign="top" align="left">44880 images</td>
<td valign="top" align="left">A deep supervised attention mechanism and a hybrid loss function are combined</td>
<td valign="top" align="left">DSC=90.21%</td>
</tr>
<tr>
<td valign="top" align="left">Deepthy Mary Alex et&#xa0;al. (<xref ref-type="bibr" rid="B55">55</xref>)</td>
<td valign="top" align="left">2022</td>
<td valign="top" align="left">YSegNet</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">700 images</td>
<td valign="top" align="left">Based on encoder, decoder and boundary extraction network</td>
<td valign="top" align="left">DSC=97%</td>
</tr>
<tr>
<td valign="top" align="left">Gongping Chen et&#xa0;al. (<xref ref-type="bibr" rid="B56">56</xref>)</td>
<td valign="top" align="left">2022</td>
<td valign="top" align="left">CNN</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">7350 images</td>
<td valign="top" align="left">New CNN</td>
<td valign="top" align="left">DSC=95.86%</td>
</tr>
<tr>
<td valign="top" align="left">Nipuna H et&#xa0;al. (<xref ref-type="bibr" rid="B57">57</xref>)</td>
<td valign="top" align="left">2021</td>
<td valign="top" align="left">3D U-Net, UNet++</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">780 volumes</td>
<td valign="top" align="left">3D U-Net/UNet++ combines 3D B-Mode and PD data</td>
<td valign="top" align="left">DSC=81%</td>
</tr>
<tr>
<td valign="top" align="left">Rashid Khan et&#xa0;al. (<xref ref-type="bibr" rid="B33">33</xref>)</td>
<td valign="top" align="left">2024</td>
<td valign="top" align="left">MLOU-Net</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">44880 images</td>
<td valign="top" align="left">Improve the deep neural network architecture and postprocessing methods</td>
<td valign="top" align="left">DSC=89.76%</td>
</tr>
<tr>
<td valign="top" align="left">Shuaizi Guo et&#xa0;al. (<xref ref-type="bibr" rid="B58">58</xref>)</td>
<td valign="top" align="left">2025</td>
<td valign="top" align="left">CUT, U-Net, CycleGAN</td>
<td valign="top" align="left">Single center,<break/>Public Dataset</td>
<td valign="top" align="left">4594 images,<break/>210 volumes</td>
<td valign="top" align="left">A cross-modal data augmentation method based on CUT</td>
<td valign="top" align="left">DSC=77.19%</td>
</tr>
<tr>
<td valign="top" align="left">Jaidip M et&#xa0;al. (<xref ref-type="bibr" rid="B59">59</xref>)</td>
<td valign="top" align="left">2022</td>
<td valign="top" align="left">2D U-Net</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">22 patients</td>
<td valign="top" align="left">Automatic measurement of kidney volume by 3D ultrasound</td>
<td valign="top" align="left">DSC =80%</td>
</tr>
<tr>
<td valign="top" align="left">Mostafa Ghelich Oghli et&#xa0;al. (<xref ref-type="bibr" rid="B60">60</xref>)</td>
<td valign="top" align="left">2024</td>
<td valign="top" align="left">Fast-Unet++</td>
<td valign="top" align="left">3 Centers</td>
<td valign="top" align="left">744 images</td>
<td valign="top" align="left">Fast-Unet is optimized by nesting layers and deep supervision</td>
<td valign="top" align="left">DSC=95%</td>
</tr>
<tr>
<td valign="top" align="left">Dong-Wook Kim et&#xa0;al. (<xref ref-type="bibr" rid="B61">61</xref>)</td>
<td valign="top" align="left">2021</td>
<td valign="top" align="left">U-Net</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">331 children</td>
<td valign="top" align="left">U-Net + active profile</td>
<td valign="top" align="left">ICC=0.925</td>
</tr>
<tr>
<td valign="top" align="left">Michael Esser et&#xa0;al. (<xref ref-type="bibr" rid="B62">62</xref>)</td>
<td valign="top" align="left">2025</td>
<td valign="top" align="left">Semiautomatic 3D segmentation</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">45 children, 48 volumes</td>
<td valign="top" align="left">Semiautomatic 3D ultrasound segmentation method</td>
<td valign="top" align="left">ICC=0.83~0.94</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Existing studies have focused mostly on normal renal structure, and the generalization ability of tumors, cysts, severe hydronephrosis and other pathological conditions has not been fully verified. Moreover, these methods generally rely on single-center data and lack large-scale external datasets and prospective clinical trial verification across devices and institutions (<xref ref-type="bibr" rid="B45">45</xref>, <xref ref-type="bibr" rid="B63">63</xref>). Future work should focus on developing robust segmentation algorithms for abnormal kidneys, constructing multi-disease and multicenter collaborative datasets, promoting the transformation of renal ultrasound AI systems from single-center research to real clinical scenarios, and verifying their feasibility and safety as routine diagnostic and treatment tools through multicenter clinical trials (<xref ref-type="bibr" rid="B64">64</xref>).</p>
</sec>
<sec id="s4_2">
<title>Renal function prediction</title>
<p>Renal function prediction is a key step in the diagnosis and treatment of renal diseases (<xref ref-type="bibr" rid="B65">65</xref>, <xref ref-type="bibr" rid="B66">66</xref>). Traditional methods rely mainly on biochemical indicators such as serum creatinine and urea nitrogen and use formulas such as CKD-EPI to estimate the glomerular filtration rate (GFR) (<xref ref-type="bibr" rid="B67">67</xref>, <xref ref-type="bibr" rid="B68">68</xref>). However, these indicators are easily affected by muscle mass, diet and other factors and are less sensitive to early kidney injury (<xref ref-type="bibr" rid="B69">69</xref>). Traditional ultrasound interpretation is subjective and lacks quantitative analysis ability (<xref ref-type="bibr" rid="B70">70</xref>). In recent years, DL technology has significantly improved the objectivity and efficiency of renal function assessment through deep fusion of multimodal ultrasound data and clinical indicators to reduce human error (<xref ref-type="bibr" rid="B71">71</xref>, <xref ref-type="bibr" rid="B72">72</xref>). Texture analysis can be used to extract functional information from images, overcome morphological limitations, and achieve quantitative descriptions of microstructures such as fibrosis and microangiopathy (<xref ref-type="bibr" rid="B73">73</xref>). In CKD, DL models can integrate multisource data such as electronic medical records, radiomics, and biomarkers to predict the risk of rapid progression of eGFR decline &#x2265;5 mL/min/1.73 m&#xb2; per year (<xref ref-type="bibr" rid="B74">74</xref>&#x2013;<xref ref-type="bibr" rid="B76">76</xref>). In acute kidney injury (AKI), DL systems can identify high-risk patients before biochemical changes occur (<xref ref-type="bibr" rid="B77">77</xref>&#x2013;<xref ref-type="bibr" rid="B80">80</xref>). <xref ref-type="table" rid="T3"><bold>Table&#xa0;3</bold></xref> lists several representative studies on renal function prediction.</p>
<table-wrap id="T3" position="float">
<label>Table&#xa0;3</label>
<caption>
<p>Application of DL technology in renal function prediction.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="left">Author, ref</th>
<th valign="top" align="left">Year</th>
<th valign="top" align="left">Algorithms</th>
<th valign="top" align="left">Data source</th>
<th valign="top" align="left">Size</th>
<th valign="top" align="left">Goals/approach</th>
<th valign="top" align="left">Results</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Ziman Chen et&#xa0;al. (<xref ref-type="bibr" rid="B81">81</xref>)</td>
<td valign="top" align="left">2023</td>
<td valign="top" align="left">Radiomics</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">160 patients</td>
<td valign="top" align="left">Combined with imaging features and clinical features</td>
<td valign="top" align="left">AUC=0.85</td>
</tr>
<tr>
<td valign="top" align="left">Han Yuan et&#xa0;al. (<xref ref-type="bibr" rid="B82">82</xref>)</td>
<td valign="top" align="left">2024</td>
<td valign="top" align="left">Combined model</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">332 patients</td>
<td valign="top" align="left">Viscoelastic imaging</td>
<td valign="top" align="left">AUC=0.91</td>
</tr>
<tr>
<td valign="top" align="left">Xinyue Huang et&#xa0;al. (<xref ref-type="bibr" rid="B83">83</xref>)</td>
<td valign="top" align="left">2025</td>
<td valign="top" align="left">Fisher</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">158 patients</td>
<td valign="top" align="left">Combined with clinical data, conventional ultrasound, shear wave elastography and vascular plane wave hypersensitivity imaging</td>
<td valign="top" align="left">Acc=84.7%</td>
</tr>
<tr>
<td valign="top" align="left">Yidan Tang et&#xa0;al. (<xref ref-type="bibr" rid="B84">84</xref>)</td>
<td valign="top" align="left">2024</td>
<td valign="top" align="left">Multimodal knowledge map</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">100 patients</td>
<td valign="top" align="left">Multimodal ultrasound</td>
<td valign="top" align="left">AUC=0.692</td>
</tr>
<tr>
<td valign="top" align="left">Ahmed M et&#xa0;al. (<xref ref-type="bibr" rid="B85">85</xref>)</td>
<td valign="top" align="left">2025</td>
<td valign="top" align="left">XAI-CKD</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">400 patients</td>
<td valign="top" align="left">Combined XAI-CKD model, BBFS feature selection, and SHAP interpretability analysis</td>
<td valign="top" align="left">AUC=1.0</td>
</tr>
<tr>
<td valign="top" align="left">Shuyuan Tian et&#xa0;al. (<xref ref-type="bibr" rid="B86">86</xref>)</td>
<td valign="top" align="left">2024</td>
<td valign="top" align="left">ResNet34</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">1049 patients</td>
<td valign="top" align="left">ResNet34 depth features and GLCM+HOG texture features were fused</td>
<td valign="top" align="left">AUC=0.781~0.931</td>
</tr>
<tr>
<td valign="top" align="left">Minyan Zhu et&#xa0;al. (<xref ref-type="bibr" rid="B87">87</xref>)</td>
<td valign="top" align="left">2024</td>
<td valign="top" align="left">SVM</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">117 patients</td>
<td valign="top" align="left">Combined with shear wave elastography, conventional ultrasound and color Doppler flow imaging</td>
<td valign="top" align="left">AUC=0.943</td>
</tr>
<tr>
<td valign="top" align="left">Fuzhe Ma et&#xa0;al. (<xref ref-type="bibr" rid="B65">65</xref>)</td>
<td valign="top" align="left">2020</td>
<td valign="top" align="left">HMANN</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">50 datasets</td>
<td valign="top" align="left">Fusion of features</td>
<td valign="top" align="left">Acc=97.5%</td>
</tr>
<tr>
<td valign="top" align="left">Fu Ying et&#xa0;al. (<xref ref-type="bibr" rid="B71">71</xref>)</td>
<td valign="top" align="left">2021</td>
<td valign="top" align="left">PCNN</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">20 patients</td>
<td valign="top" align="left">Ultrasound image enhancement algorithms</td>
<td valign="top" align="left">AUC=0.758</td>
</tr>
<tr>
<td valign="top" align="left">Chin-Chi Kuo et&#xa0;al. (<xref ref-type="bibr" rid="B88">88</xref>)</td>
<td valign="top" align="left">2019</td>
<td valign="top" align="left">ResNet-101, XGBoost</td>
<td valign="top" align="left">Single center</td>
<td valign="top" align="left">1299 patients</td>
<td valign="top" align="left">Deep fusion of ResNet-101 and XGBoost</td>
<td valign="top" align="left">AUC=0.904</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Ziman Chen et&#xa0;al. (<xref ref-type="bibr" rid="B81">81</xref>) used radiomics to extract many quantitative features and combined them with clinical indicators to construct a prediction model, which achieved a noninvasive assessment of moderate to severe renal fibrosis (AUC = 0.85). Han Yuan et&#xa0;al. (<xref ref-type="bibr" rid="B82">82</xref>) used ultrasound viscoelastic imaging technology to assess renal function effectively and the degree of fibrosis via mechanical parameters such as the Emean and Vmean (AUC = 0.91). Xinyue Huang et&#xa0;al. (<xref ref-type="bibr" rid="B83">83</xref>) fused clinical data, conventional ultrasound, shear wave elastography, and plane wave hypersensitivity flow imaging to construct a Fisher discriminant model, which successfully distinguished different fibrosis grades (the highest accuracy was 84.7%). Yidan Tang et&#xa0;al. (<xref ref-type="bibr" rid="B84">84</xref>) integrated conventional ultrasound, contrast-enhanced ultrasound, and elastography to construct a multimodal ultrasound knowledge map and AI prediction model for the risk prediction of sepsis-related acute kidney injury. Ahmed M et&#xa0;al.&#x2019;s XAI-CKD system (<xref ref-type="bibr" rid="B85">85</xref>), which is based on an extra tree classifier combined with SHAP interpretability analysis, achieved near-perfect performance (AUC = 1.0) in CKD classification. Shuyuan Tian et&#xa0;al. (<xref ref-type="bibr" rid="B86">86</xref>) integrated ResNet34 depth features and traditional texture features (GLCM+HOG) to achieve CKD diagnosis, especially in the G5 stage (AUC = 0.931). Minyan Zhu et&#xa0;al. (<xref ref-type="bibr" rid="B87">87</xref>) used an SVM to integrate various types of ultrasound image information and successfully predicted the degree of renal interstitial fibrosis (AUC = 0.943 when the IFTA &gt; 50%). 
Fuzhe Ma (<xref ref-type="bibr" rid="B65">65</xref>) and Fu Ying (<xref ref-type="bibr" rid="B71">71</xref>) proposed the HMAN-based detection model and PCNN-based image enhancement algorithm, respectively, which improved the image quality and diagnostic reliability. Chin-Chi Kuo et&#xa0;al. (<xref ref-type="bibr" rid="B88">88</xref>) combined ResNet-101 and XGBoost to achieve automatic estimation of the eGFR and CKD grade (AUC = 0.904).</p>
<p>However, an examination of these representative studies reveals the core challenges facing the field (<xref ref-type="bibr" rid="B89">89</xref>&#x2013;<xref ref-type="bibr" rid="B91">91</xref>). First, the generalizability of the models is generally questionable (<xref ref-type="bibr" rid="B92">92</xref>&#x2013;<xref ref-type="bibr" rid="B94">94</xref>). For example, the near-perfect performance (AUC = 1.0) reported by Ahmed M et&#xa0;al. (<xref ref-type="bibr" rid="B85">85</xref>) is highly unusual in real medical data, strongly suggesting that the model may be overfitted on a specific dataset, and its cross-center applicability urgently needs to be verified. Second, the clinical translation of the technology faces a realistic bottleneck. For example, the multimodal fusion scheme of Xinyue Huang et&#xa0;al. (<xref ref-type="bibr" rid="B83">83</xref>) has improved performance, but its dependence on a variety of advanced imaging technologies is difficult to popularize in primary medical institutions, and the actual application cost is high. In addition, the limitations of research methods urgently need to be overcome. Although the pioneering work of Chin-Chi Kuo et&#xa0;al. (<xref ref-type="bibr" rid="B88">88</xref>) verified its technical feasibility, the limitations of its single-center design and lack of prospective validation are still common problems in many subsequent studies. In summary, although current research has made continuous breakthroughs in model performance, it is generally limited by key bottlenecks such as single-center data dependence, insufficient cross-center validation, and insufficient consideration of clinical applicability (<xref ref-type="bibr" rid="B95">95</xref>, <xref ref-type="bibr" rid="B96">96</xref>).</p>
</sec>
<sec id="s4_3">
<title>Renal disease diagnosis</title>
<p>Ultrasound imaging plays an irreplaceable role in the diagnosis of renal diseases (<xref ref-type="bibr" rid="B27">27</xref>). It is widely used in the assessment of renal morphology, screening of space-occupying lesions, diagnosis of hydronephrosis, monitoring after renal transplantation, and differentiation of cystic and solid lesions (<xref ref-type="bibr" rid="B97">97</xref>, <xref ref-type="bibr" rid="B98">98</xref>). Especially in children, pregnant women and patients with renal insufficiency, ultrasound has become the preferred imaging method because of its safety (<xref ref-type="bibr" rid="B99">99</xref>). However, traditional renal ultrasound diagnosis also has obvious shortcomings: the results are highly dependent on the experience and skills of the operators, and there is strong subjectivity (<xref ref-type="bibr" rid="B100">100</xref>). It is not sensitive to early changes in renal function or slight structural changes (<xref ref-type="bibr" rid="B101">101</xref>). The ability of quantitative analysis is limited; for example, accurate assessment of renal fibrosis, diffuse lesions, or small hemodynamic changes is difficult. In addition, the low degree of standardization among different devices and scanning parameters also affects the comparability and repeatability of the results. DL technology has shown great potential in the diagnosis of renal diseases (<xref ref-type="bibr" rid="B102">102</xref>, <xref ref-type="bibr" rid="B103">103</xref>). <xref ref-type="table" rid="T4"><bold>Table&#xa0;4</bold></xref> lists several representative studies on renal disease diagnosis.</p>
<table-wrap id="T4" position="float">
<label>Table&#xa0;4</label>
<caption>
<p>Application of DL technology in the diagnosis of renal diseases.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">Author, ref</th>
<th valign="middle" align="left">Year</th>
<th valign="middle" align="left">Algorithms</th>
<th valign="middle" align="left">Data source</th>
<th valign="middle" align="left">Size</th>
<th valign="middle" align="left">Goals/approach</th>
<th valign="middle" align="left">Results</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">Miguel Molina-Moreno et&#xa0;al. (<xref ref-type="bibr" rid="B75">75</xref>)</td>
<td valign="middle" align="left">2024</td>
<td valign="middle" align="left">ResNet-50, Mask-RCNN</td>
<td valign="middle" align="left">Single center</td>
<td valign="middle" align="left">1985 images</td>
<td valign="middle" align="left">Multitask convolutional neural network</td>
<td valign="middle" align="left">AUC=0.819</td>
</tr>
<tr>
<td valign="middle" align="left">Shi Yin et&#xa0;al. (<xref ref-type="bibr" rid="B104">104</xref>)</td>
<td valign="middle" align="left">2020</td>
<td valign="middle" align="left">CAKUT</td>
<td valign="middle" align="left">Single center</td>
<td valign="middle" align="left">157 children</td>
<td valign="middle" align="left">Multi-instance deep learning method based on multi-view ultrasound images</td>
<td valign="middle" align="left">AUC=0.961</td>
</tr>
<tr>
<td valign="middle" align="left">Umar Islam et&#xa0;al. (<xref ref-type="bibr" rid="B27">27</xref>)</td>
<td valign="middle" align="left">2024</td>
<td valign="middle" align="left">Novel DCNN</td>
<td valign="middle" align="left">Public Dataset</td>
<td valign="middle" align="left">1057 images</td>
<td valign="middle" align="left">New two-path DCNN model</td>
<td valign="middle" align="left">Acc=99.8%</td>
</tr>
<tr>
<td valign="middle" align="left">Jinjin Hai et&#xa0;al. (<xref ref-type="bibr" rid="B105">105</xref>)</td>
<td valign="middle" align="left">2021</td>
<td valign="middle" align="left">CD-ConcatNet</td>
<td valign="middle" align="left">Single center</td>
<td valign="middle" align="left">76 patients</td>
<td valign="middle" align="left">2D and 3D feature processing were fused to process multi-view ultrasound images</td>
<td valign="middle" align="left">AUC=0.8667</td>
</tr>
<tr>
<td valign="middle" align="left">S Sudharson et&#xa0;al. (<xref ref-type="bibr" rid="B98">98</xref>)</td>
<td valign="middle" align="left">2020</td>
<td valign="middle" align="left">ResNet-101+ ShuffleNet+ MobileNet-v2</td>
<td valign="middle" align="left">Public Dataset</td>
<td valign="middle" align="left">4940 images</td>
<td valign="middle" align="left">Deep neural network ensemble model based on transfer learning</td>
<td valign="middle" align="left">Acc=96.54%</td>
</tr>
<tr>
<td valign="middle" align="left">Ming-Chin Tsai et&#xa0;al. (<xref ref-type="bibr" rid="B106">106</xref>)</td>
<td valign="middle" align="left">2022</td>
<td valign="middle" align="left">ResNet-50+ Transfer Learning</td>
<td valign="middle" align="left">Single center</td>
<td valign="middle" align="left">1599 images</td>
<td valign="middle" align="left">Transfer learning model based on ResNet-50</td>
<td valign="middle" align="left">AUC=0.959</td>
</tr>
<tr>
<td valign="middle" align="left">Maosheng Xu et&#xa0;al. (<xref ref-type="bibr" rid="B22">22</xref>)</td>
<td valign="middle" align="left">2025</td>
<td valign="middle" align="left">Combined multimodal ultrasound</td>
<td valign="middle" align="left">Single center</td>
<td valign="middle" align="left">341 patients</td>
<td valign="middle" align="left">Multimodal ultrasound combined diagnostic model</td>
<td valign="middle" align="left">AUC=0.75</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Miguel Molina-Moreno et&#xa0;al. (<xref ref-type="bibr" rid="B75">75</xref>) developed URI-CADS, an automatic system based on a multitask convolutional neural network, to realize the integrated analysis of kidney image segmentation and multi-pathological diagnosis, and its AUC reached 0.819 for multiple pathological diagnoses. Shi Yin et&#xa0;al. (<xref ref-type="bibr" rid="B104">104</xref>) proposed a multi-example deep learning framework to distinguish children with congenital anomalies of the kidney and urinary tract (CAKUT) effectively from those with unilateral hydronephrosis by clustering multi-view ultrasound image features. The AUC of the MIL model was as high as 0.961. Umar Islam et&#xa0;al. (<xref ref-type="bibr" rid="B27">27</xref>) designed a novel double-path convolutional neural network, which was significantly superior to classical models (such as VGG16 and ResNet50) in the detection of hydronephrosis, reaching 99.8% accuracy. Jinjin Hai et&#xa0;al. (<xref ref-type="bibr" rid="B105">105</xref>) integrated 2D and 3D convolutional structures to construct CD-ConcatNet to achieve fusion feature extraction and disease classification of multi-view renal ultrasound images (AUC = 0.8667). In addition, Sudharson et&#xa0;al. (<xref ref-type="bibr" rid="B98">98</xref>) achieved a high-precision four-classification task by integrating multiple pretrained models (ResNet-101, ShuffleNet, and MobileNet-v2) with an accuracy of 96.54%. Ming-Chin Tsai et&#xa0;al. (<xref ref-type="bibr" rid="B106">106</xref>) used transfer learning to optimize ResNet-50 to screen for children&#x2019;s kidney abnormalities, and the AUC of the model was 0.959. Maosheng Xu et&#xa0;al. (<xref ref-type="bibr" rid="B22">22</xref>) combined two-dimensional ultrasound, color Doppler and shear wave elastography to construct a multimodal combined diagnostic model, and the AUC reached 0.75 in the classification of glomerular diseases in children.</p>
<p>These studies show the broad prospects of deep learning in improving the automation, quantification and multi-disease discrimination of renal ultrasound diagnosis (<xref ref-type="bibr" rid="B107">107</xref>, <xref ref-type="bibr" rid="B108">108</xref>). However, the in-depth analysis identified several key issues of concern: first, the prudent evaluation of model performance. For example, the 99.8% accuracy reported by Umar Islam et&#xa0;al. (<xref ref-type="bibr" rid="B27">27</xref>) is extremely rare in medical image analysis and may reflect improper partitioning of the dataset or overfitting risk. Second, the clinical usefulness of complex models is questionable. Although the multitask system of Miguel Molina-Moreno et&#xa0;al. (<xref ref-type="bibr" rid="B75">75</xref>) has comprehensive functions, its stability under multicenter and different equipment conditions has not been verified. Meanwhile, the multimodal method of Maosheng Xu et&#xa0;al. (<xref ref-type="bibr" rid="B22">22</xref>) has relatively limited performance (AUC = 0.75), suggesting that complex technology fusion may not necessarily improve performance. In addition, the study population was underrepresented. Most studies have focused on common conditions in children, and the applicability of these findings to complex abnormal renal structures (such as severe malformations and postoperative changes) remains to be investigated. In summary, these current studies are constantly innovating at the technical level, but there are still obvious deficiencies in the verification of model generalization ability and the assessment of clinical practicality (<xref ref-type="bibr" rid="B109">109</xref>).</p>
</sec>
</sec>
<sec id="s5">
<title>Challenges of DL technology in renal ultrasound applications</title>
<sec id="s5_1">
<title>Data-related challenges</title>
<sec id="s5_1_1">
<title>Data quality and labeling</title>
<p>The data quality of renal ultrasound images directly affects the performance of DL models. Common noise, artifacts, and inconsistent resolution of ultrasound images lead to blurred renal structural boundaries and inapparent features, which affect the extraction of key information by the model. Guo et&#xa0;al. (<xref ref-type="bibr" rid="B58">58</xref>) demonstrated that a 30% reduction in the signal-to-noise ratio of ultrasound images can reduce the Dice coefficient of deep learning renal segmentation models by 8%-12%. High-quality labeling is the basis for training a reliable model, but renal ultrasound labeling faces many challenges (<xref ref-type="bibr" rid="B110">110</xref>). The unclear boundaries of the kidney and the lesion area lead to large differences in labeling between different doctors, and it takes 10&#x2013;15 minutes for senior sonographers to label a single image (<xref ref-type="bibr" rid="B111">111</xref>). In addition, the inconsistency of labeling standards makes it difficult to integrate multicenter data and affects the generalizability of the model. Wu et&#xa0;al. (<xref ref-type="bibr" rid="B112">112</xref>) demonstrated that the inconsistency of observer labeling leads to a significant decrease in the average precision (AP) of a deep learning model. The AP50 value was 92.17% when the full labeling method was used, whereas the AP50 value increased to 98.57% when the local labeling method was used. To improve the quality of labeling, some studies have used enhanced data labeling strategies and automated pre-labeling techniques while evaluating labeling consistency through gradient mapping (<xref ref-type="bibr" rid="B105">105</xref>). These methods are helpful for improving the training effect of renal ultrasound AI models (<xref ref-type="bibr" rid="B113">113</xref>).</p>
</sec>
<sec id="s5_1_2">
<title>Insufficient data</title>
<p>In DL research on renal ultrasound, data scarcity, especially for rare diseases such as renal medullary cystic disease or hereditary nephritis, is the core challenge (<xref ref-type="bibr" rid="B114">114</xref>). Small samples can easily lead to overfitting and poor generalization ability of the model (<xref ref-type="bibr" rid="B115">115</xref>). Data heterogeneity in multiple centers further aggravates the problem of uneven data distribution. The DL model developed by Akbari et&#xa0;al. (<xref ref-type="bibr" rid="B64">64</xref>) showed high consistency (correlation coefficient &gt;0.9) on single-center data, whereas the correlation coefficient decreased to approximately 0.8 when external multicenter validation was performed. To solve these problems, researchers have used data enhancement techniques (rotation, scaling, noise, etc.), which can improve the accuracy of small sample models by 5%-10% (<xref ref-type="bibr" rid="B116">116</xref>). Transfer learning can reduce the dependence on the amount of task-specific data by transferring generic features and still maintain high classification performance when the training samples are halved (<xref ref-type="bibr" rid="B117">117</xref>). In addition, cross-center data standardization and synthetic data generation, such as the diffusion model Med-DDPM, have also been explored to mitigate data scarcity and privacy issues. However, these methods still need more clinical validation to address potential limitations, such as algorithm transparency and data security.</p>
</sec>
</sec>
<sec id="s5_2">
<title>Technology-related challenges</title>
<sec id="s5_2_1">
<title>Interpretability of the algorithm</title>
<p>At present, in renal ultrasound diagnosis, although complex DL algorithms such as deep neural networks have high accuracy, their &#x201c;black box&#x201d; characteristics make the decision-making basis difficult to understand, which severely restricts clinical trust and application (<xref ref-type="bibr" rid="B118">118</xref>). For example, models cannot explain why tumors with ill-defined boundaries are considered malignant, forcing physicians to rely on traditional pathological tests (<xref ref-type="bibr" rid="B119">119</xref>). Alderden et&#xa0;al. (<xref ref-type="bibr" rid="B120">120</xref>) used attention mechanisms to highlight key image areas, resulting in a 35% increase in physician trust. Feature visualization technology reveals decision logic by showing the texture, echo, and other features that the model focuses on (<xref ref-type="bibr" rid="B121">121</xref>). In addition, a lack of interpretability exacerbates ethical risks, making it difficult to assess potential biases of models against specific populations, such as different ages or genders (<xref ref-type="bibr" rid="B122">122</xref>). These challenges highlight the importance of developing explainable artificial intelligence (XAI) methods. It is necessary to enhance transparency through visual interpretation, rule extraction and other techniques and establish a standardized ethical review framework to promote the safe application of AI in renal ultrasound diagnosis (<xref ref-type="bibr" rid="B123">123</xref>).</p>
</sec>
<sec id="s5_2_2">
<title>Robustness and generalization ability of the model</title>
<p>The robustness and generalizability of renal ultrasound AI models face multiple challenges, which are reflected in three main aspects: equipment differences, operator factors, and individual patient differences (<xref ref-type="bibr" rid="B124">124</xref>). The imaging principles and parameter settings of different ultrasound devices lead to differences in image feature distributions (<xref ref-type="bibr" rid="B125">125</xref>). For example, images from high-end ultrasound devices have high resolution and low noise, whereas images from primary hospitals may have obvious artifacts. The image quality was also affected by the operator&#x2019;s scanning technique and section selection. The images of the same patient collected by different doctors may cause the Dice coefficient of the model segmentation results to fluctuate by 5%-7% (<xref ref-type="bibr" rid="B126">126</xref>). A small kidney size and incompletely developed structure in children and renal atrophy and fatty infiltration in elderly patients often lead to a 22% decrease in the performance of models trained for adults on children&#x2019;s data (<xref ref-type="bibr" rid="B127">127</xref>).</p>
</sec>
</sec>
<sec id="s5_3">
<title>Clinical integration challenges</title>
<sec id="s5_3_1">
<title>Integration with the clinical workflow</title>
<p>The core challenges of DL technology in the standardization of renal ultrasound data are the lack of standardization of multisource heterogeneous data and the differences in equipment models, imaging parameters, and operation specifications across different medical institutions, which limits the generalizability of AI models across institutions. The survey revealed that only 32% of the hospitals used unified scanning standards, which severely affected their clinical suitability (<xref ref-type="bibr" rid="B128">128</xref>). &#x160;tevik et&#xa0;al. (<xref ref-type="bibr" rid="B129">129</xref>) explored the integration of AI into clinical workflows, but the need to manually upload images for analysis extended a single examination by 8&#x2013;10 minutes, which did not meet clinical requirements for efficiency. This review revealed that AI-assisted diagnosis can significantly improve diagnostic accuracy. The diagnostic accuracy of AI-modified methods for complex renal diseases has increased by 21%, especially in the automatic detection of hydronephrosis and the classification of chronic renal disease, highlighting the advantages of standardization. Current technical bottlenecks include process interruptions caused by offline analysis modes and the lack of uniform image quality assessment standards, but real-time image analysis and computer-aided diagnosis systems enabled by convolutional neural networks have shown the potential to optimize workflows (<xref ref-type="bibr" rid="B130">130</xref>, <xref ref-type="bibr" rid="B131">131</xref>). In the future, it is necessary to establish cross-platform data standards, develop embedded AI systems, and solve key problems such as algorithm interpretability and insufficient clinical validation to realize intelligent integration of the whole process from image acquisition to diagnostic reporting (<xref ref-type="bibr" rid="B132">132</xref>, <xref ref-type="bibr" rid="B133">133</xref>).</p>
</sec>
<sec id="s5_3_2">
<title>Regulatory and ethical issues</title>
<p>Regulation and ethics are not to be ignored; Muralidharan et&#xa0;al. (<xref ref-type="bibr" rid="B134">134</xref>) reported that only 3.6% of FDA-approved AI/ML medical devices reported race/ethnicity, 99.1% did not provide socioeconomic data, and 81.6% did not report the age of the study subjects. The issue of data privacy is particularly prominent (<xref ref-type="bibr" rid="B135">135</xref>, <xref ref-type="bibr" rid="B136">136</xref>). Because renal ultrasound images contain sensitive information, data sharing from multiple centers often conflicts with privacy regulations (<xref ref-type="bibr" rid="B137">137</xref>). In addition, ethical review should focus on algorithm fairness, including the evaluation of diagnostic differences in patients of different races and economic levels. The current solution emphasizes multiparty collaboration: the need to establish unified regulatory guidelines, improve data anonymization technology, develop a liability identification framework, and reduce algorithm bias through diverse dataset training is essential to promote the safe application of AI in renal ultrasound.</p>
</sec>
</sec>
</sec>
<sec id="s6">
<title>Future prospects of DL technology in renal ultrasound</title>
<sec id="s6_1">
<title>Integration of emerging technologies</title>
<p>The integration of AI and multimodal imaging technology has significantly improved the diagnostic ability for renal diseases. With the anatomical details and functional information provided by CT and MRI, combined with the real-time advantages of ultrasound, the multimodal deep learning model can improve the diagnostic accuracy of renal tumor staging by 23% compared with that of a single ultrasound (<xref ref-type="bibr" rid="B138">138</xref>). The combination of molecular imaging technology and AI enables earlier molecular diagnosis, such as targeted contrast ultrasound molecular imaging, which can detect renal inflammation before renal dysfunction (<xref ref-type="bibr" rid="B139">139</xref>). The integration of wearable devices and Internet of Things technology has created a new mode of remote monitoring. Portable ultrasound devices can provide early warning through cloud-based AI analysis, and clinical trials have shown that acute exacerbation of chronic renal disease can be warned 3&#x2013;5 days earlier (<xref ref-type="bibr" rid="B140">140</xref>). Given the limitations of single-center, single-modality and small samples, large medical models pretrained on large-scale multimodal data can extract shared representations of ultrasound-CT-MRI without massive labeling through cross-modal alignment and self-supervised learning. This approach significantly improves the ability to identify rare kidney diseases, such as hereditary nephritis, and alleviates the overfitting problem caused by scarce data. Edge computing technologies deploy lightweight AI models on portable devices to achieve point-of-care diagnosis. Federated learning achieved a diagnostic accuracy of 90.2% in the collaboration of 10 hospitals through the parameter sharing mechanism, which effectively solved the problem of data privacy (<xref ref-type="bibr" rid="B141">141</xref>). 
Combined with the federated learning and fine-tuning strategy, each center can share the basic model parameters while retaining local data, realizing multicenter coevolution and overcoming the bottleneck of single-center generalization. Augmented reality technology superimposes AI-processed tumor boundary information on the surgical field in real time, which improves the integrity rate of renal tumor resection by 18% (<xref ref-type="bibr" rid="B84">84</xref>). A large medical model pretrained on massive multimodal data shows strong adaptability and achieves 82% accuracy in the ultrasound diagnosis of rare renal diseases (<xref ref-type="bibr" rid="B142">142</xref>). The future large model will provide an interpretable basis for malignant diagnosis through visualization of the attention mechanism and chain-of-thought reasoning, transform black-box decisions into traceable clinical logic, and enhance the trust of doctors. These technical advances have promoted the rapid development of renal disease diagnosis, from morphological evaluation to functional, molecular, and real-time dynamic monitoring.</p>
</sec>
<sec id="s6_2">
<title>Integrated intelligent diagnosis system</title>
<p>The ultimate goal of AI in the field of renal ultrasound is to develop an intelligent diagnostic system that can provide full-process, high-efficiency, and high-precision decision support for clinical practice through deep integration of a variety of AI functional modules and optimized human&#x2013;computer interactions. The current research frontiers focus on building a one-stop diagnostic platform, deepening the application of personalized medicine, and promoting deep multimodal integration (<xref ref-type="bibr" rid="B143">143</xref>). The technical basis of the system is built on the framework of a multimodal large model fusion mechanism: unified representation learning is used to integrate ultrasound, CT, MRI and pathomics data, and a cross-attention module is used to achieve dynamic weighting of cross-modal features, which overcomes the limitations of traditional narrow AI, which only processes a single image. At the bottom of the platform, a lightweight real-time inference engine is deployed to compress the number of large model parameters to the scale that can be deployed on edge devices to meet the clinical needs of millisecond response. The core of the one-stop diagnostic platform seamlessly integrates the full chain of renal ultrasound AI applications, including real-time image quality assessment and standardized section guidance, automatic renal segmentation and volume measurement, dynamic prediction of renal function on the basis of image features and elastography parameters, intelligent identification and classification of common renal diseases, and automatic generation of structured diagnostic reports (<xref ref-type="bibr" rid="B144">144</xref>). The platform adopts the paradigm of large model pretraining + domain fine-tuning. 
First, self-supervised pretraining is carried out on millions of multicenter and multimodal data, and then efficient fine-tuning techniques such as LoRA are used to adapt local device parameters and population characteristics on the center-specific data to ensure cross-center generalization ability and localization accuracy. The platform uses a microservice architecture and workflow engine, allowing each AI module to call on demand, feed results to each other, and realize a closed loop of &#x201c;scan, analysis, report&#x201d; through a unified interface. Clinical verification shows that the integrated system can reduce the time of renal ultrasound examination and diagnosis by more than 40% and improve the consistency of diagnosis. By integrating the visualization research module of the attention mechanism, the closed loop can generate heatmaps in real time and overlay them on the original image, clearly label the decision basis of the model, and transform the black box into a transparent decision chain. The core of personalized medicine is the use of AI to mine multidimensional data of individual patients (such as dynamic changes in ultrasound imaging features, genetic background, serum/urine biomarker trajectories, comorbidities, and medication history in electronic health records) and the construction of patient-specific disease progression prediction models and treatment response models. For example, Bayesian deep learning models that combine trends in kidney texture features with genomic data can generate customized predictions of kidney decline trajectories for each CKD patient. The large model-based longitudinal dynamic fusion framework uses a temporal fusion transformer, which captures the imaging evolution of patients for months or even years, combined with time series data from electronic medical records, to realize dynamic risk warning and adaptive adjustment of treatment plans. 
Multimodal fusion is the technical cornerstone for achieving precision personalization. The future system will break through the current simple fusion of &#x201c;ultrasound + clinical data&#x201d; and evolve into deep fusion of heterogeneous data. Cross-image modality fusion uses AI to align and fuse the complementary information of renal ultrasound and CT/MRI/PET-CT. Radiomics and environment fusion integrate ultrasound radiomics features, serum/urine proteomics/metabolomics, and environmental exposure factors (<xref ref-type="bibr" rid="B116">116</xref>). The final multicenter, multimodal, and large-sample standardized platform aggregates multicenter data through federated learning. The basic model of 100 million parameters is trained to form a clinical decision-making center that can be transferred, interpreted and responded to in real time, and the fundamental challenge of the disconnect between traditional AI and clinical practice is completely solved.</p>
</sec>
<sec id="s6_3">
<title>Multidisciplinary cooperation</title>
<p>The breakthrough of DL in the field of renal ultrasound requires deep interdisciplinary integration, integrating the expertise of computer scientists (algorithm design), imaging experts (image annotation and clinical relevance), nephrologists (diagnosis and treatment decision-making) and biomedical engineers (signal and software and hardware optimization) (<xref ref-type="bibr" rid="B145">145</xref>). Through joint discussion and clinical rotation to establish a common understanding, collaboration should run through the full cycle from clinical requirement definition, data collection, and model development to clinical verification to avoid technology being divorced from reality (<xref ref-type="bibr" rid="B146">146</xref>). This collaborative model can not only improve the reliability and interpretability of AI in the diagnosis of hydronephrosis and nephropathy but also optimize its clinical applicability so that resources can be focused on real bottleneck problems (<xref ref-type="bibr" rid="B147">147</xref>).</p>
</sec>
</sec>
<sec id="s7" sec-type="conclusions">
<title>Conclusions</title>
<p>DL has brought transformative advances to renal ultrasound, enhancing diagnostic accuracy, efficiency, and standardization across image segmentation, volumetry, disease diagnosis, and functional prediction while addressing traditional ultrasound limitations such as operator dependence and insufficient quantification&#x2014;with performance comparable to that of professional physicians in renal structure recognition and lesion detection (<xref ref-type="bibr" rid="B148">148</xref>); however, critical research gaps, including uneven data quality, inadequate standardization/labeling, limited algorithm interpretability, poor cross-device generalization, clinical integration barriers, and incomplete regulatory/ethical frameworks, hinder its clinical translation, and accelerating this process requires targeted steps such as establishing unified data standards, developing explainable AI, deepening interdisciplinary collaboration, and refining regulatory guidelines, with future advancements in multimodal fusion, federated renal ultrasound systems, and medical large language models driving AI toward intelligent, personalized renal ultrasound systems that optimize the full workflow from image acquisition to clinical decision-making, ultimately enabling AI to become a core tool for precise renal disease diagnosis and treatment and supporting global kidney health management.</p>
</sec>
</body>
<back>
<sec id="s8" sec-type="author-contributions">
<title>Author contributions</title>
<p>YZha: Writing &#x2013; original draft, Methodology, Supervision, Data curation, Conceptualization, Software, Investigation, Resources, Validation, Formal Analysis, Project administration, Funding acquisition, Visualization, Writing &#x2013; review &amp; editing. YH: Writing &#x2013; review &amp; editing, Conceptualization, Resources, Data curation. YZhu: Investigation, Methodology, Writing &#x2013; review &amp; editing, Supervision. KC: Writing &#x2013; review &amp; editing, Project administration, Formal Analysis, Data curation. WL: Visualization, Investigation, Data curation, Funding acquisition, Writing &#x2013; review &amp; editing. YL: Funding acquisition, Project administration, Methodology, Writing &#x2013; review &amp; editing. JL: Methodology, Conceptualization, Writing &#x2013; review &amp; editing, Project administration, Validation. TQ: Writing &#x2013; review &amp; editing.</p></sec>
<ack>
<title>Acknowledgments</title>
<p>We would like to thank the Biomedical Engineering Experimental Teaching Center of Sichuan University for their assistance in the experiments.</p>
</ack>
<sec id="s10" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p></sec>
<sec id="s11" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was not used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p></sec>
<sec id="s12" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p></sec>
<ref-list>
<title>References</title>
<ref id="B1">
<label>1</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bello</surname> <given-names>AK</given-names></name>
<name><surname>Ronksley</surname> <given-names>PE</given-names></name>
<name><surname>Tangri</surname> <given-names>N</given-names></name>
<name><surname>Kurzawa</surname> <given-names>J</given-names></name>
<name><surname>Osman</surname> <given-names>MA</given-names></name>
<name><surname>Singer</surname> <given-names>A</given-names></name>
<etal/>
</person-group>. 
<article-title>Quality of chronic kidney disease management in canadian primary care</article-title>. <source>JAMA network Open</source>. (<year>2019</year>) <volume>2</volume>:<fpage>e1910704</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1001/jamanetworkopen.2019.10704</pub-id>, PMID: <pub-id pub-id-type="pmid">31483474</pub-id>
</mixed-citation>
</ref>
<ref id="B2">
<label>2</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rashidi</surname> <given-names>P</given-names></name>
<name><surname>Bihorac</surname> <given-names>A</given-names></name>
</person-group>. 
<article-title>Artificial intelligence approaches to improve kidney care</article-title>. <source>Nat Rev Nephrol</source>. (<year>2020</year>) <volume>16</volume>:<page-range>71&#x2013;2</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41581-019-0243-3</pub-id>, PMID: <pub-id pub-id-type="pmid">31873197</pub-id>
</mixed-citation>
</ref>
<ref id="B3">
<label>3</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kranert</surname> <given-names>PC</given-names></name>
<name><surname>Kranert</surname> <given-names>P</given-names></name>
<name><surname>Banas</surname> <given-names>MC</given-names></name>
<name><surname>Jung</surname> <given-names>EM</given-names></name>
<name><surname>Banas</surname> <given-names>B</given-names></name>
<name><surname>Putz</surname> <given-names>FJ</given-names></name>
<etal/>
</person-group>. 
<article-title>Utility of ultrasound-guided attenuation parameter (UGAP) in renal angiomyolipoma (AML): first results</article-title>. <source>Diagnostics</source>. (<year>2024</year>) <volume>14</volume>:<elocation-id>2002</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/diagnostics14182002</pub-id>, PMID: <pub-id pub-id-type="pmid">39335680</pub-id>
</mixed-citation>
</ref>
<ref id="B4">
<label>4</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dinescu</surname> <given-names>SC</given-names></name>
<name><surname>Stoica</surname> <given-names>D</given-names></name>
<name><surname>Bita</surname> <given-names>CE</given-names></name>
<name><surname>Nicoara</surname> <given-names>AI</given-names></name>
<name><surname>Cirstei</surname> <given-names>M</given-names></name>
<name><surname>Staiculesc</surname> <given-names>MA</given-names></name>
<etal/>
</person-group>. 
<article-title>Applications of artificial intelligence in musculoskeletal ultrasound: narrative review</article-title>. <source>Front Med</source>. (<year>2023</year>) <volume>10</volume>:<elocation-id>1286085</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fmed.2023.1286085</pub-id>, PMID: <pub-id pub-id-type="pmid">38076232</pub-id>
</mixed-citation>
</ref>
<ref id="B5">
<label>5</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Revzin</surname> <given-names>MV</given-names></name>
<name><surname>Srivastava</surname> <given-names>B</given-names></name>
<name><surname>Pellerito</surname> <given-names>JS</given-names></name>
</person-group>. 
<article-title>Ultrasound of the upper urinary tract</article-title>. <source>Radiologic Clinics North America</source>. (<year>2025</year>) <volume>63</volume>:<fpage>57</fpage>&#x2013;<lpage>82</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rcl.2024.09.002</pub-id>, PMID: <pub-id pub-id-type="pmid">39510663</pub-id>
</mixed-citation>
</ref>
<ref id="B6">
<label>6</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gunabushanam</surname> <given-names>G</given-names></name>
<name><surname>Chaubal</surname> <given-names>R</given-names></name>
<name><surname>Scoutt</surname> <given-names>LM</given-names></name>
</person-group>. 
<article-title>Doppler ultrasound of the renal vasculature</article-title>. <source>J ultrasound Med</source>. (<year>2024</year>) <volume>43</volume>:<page-range>1543&#x2013;62</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/jum.16466</pub-id>, PMID: <pub-id pub-id-type="pmid">38654477</pub-id>
</mixed-citation>
</ref>
<ref id="B7">
<label>7</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xu</surname> <given-names>Y</given-names></name>
<name><surname>Luo</surname> <given-names>Y</given-names></name>
<name><surname>Chen</surname> <given-names>M</given-names></name>
<name><surname>Peng</surname> <given-names>Q</given-names></name>
<name><surname>Niu</surname> <given-names>C</given-names></name>
</person-group>. 
<article-title>Super-resolution ultrasound imaging of renal microcirculation in a murine model of renal fibrosis</article-title>. <source>J ultrasound Med</source>. (<year>2025</year>) <volume>44</volume>:<page-range>2229&#x2013;41</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/jum.70003</pub-id>, PMID: <pub-id pub-id-type="pmid">40665893</pub-id>
</mixed-citation>
</ref>
<ref id="B8">
<label>8</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Brasseler</surname> <given-names>M</given-names></name>
<name><surname>Finkelberg</surname> <given-names>I</given-names></name>
<name><surname>M&#xfc;ntjes</surname> <given-names>C</given-names></name>
<name><surname>Cetiner</surname> <given-names>M</given-names></name>
</person-group>. 
<article-title>Case Report: Renal artery stenosis in children: ultrasound as a decisive diagnostic and therapy-accompanying technique</article-title>. <source>Front Pediatr</source>. (<year>2023</year>) <volume>11</volume>:<elocation-id>1251757</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fped.2023.1251757</pub-id>, PMID: <pub-id pub-id-type="pmid">38078312</pub-id>
</mixed-citation>
</ref>
<ref id="B9">
<label>9</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tufano</surname> <given-names>A</given-names></name>
<name><surname>Antonelli</surname> <given-names>L</given-names></name>
<name><surname>Di Pierro</surname> <given-names>GB</given-names></name>
<name><surname>Flammia</surname> <given-names>RS</given-names></name>
<name><surname>Minelli</surname> <given-names>R</given-names></name>
<name><surname>Anceschi</surname> <given-names>U</given-names></name>
<etal/>
</person-group>. 
<article-title>Diagnostic performance of contrast-enhanced ultrasound in the evaluation of small renal masses: A systematic review and meta-analysis</article-title>. <source>Diagnostics</source>. (<year>2022</year>) <volume>12</volume>:<elocation-id>2310</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/diagnostics12102310</pub-id>, PMID: <pub-id pub-id-type="pmid">36291999</pub-id>
</mixed-citation>
</ref>
<ref id="B10">
<label>10</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Franke</surname> <given-names>D</given-names></name>
<name><surname>Renz</surname> <given-names>DM</given-names></name>
<name><surname>Mentzel</surname> <given-names>HJ</given-names></name>
</person-group>. 
<article-title>Bildgebung nach Nierentransplantation im Kindes- und Jugendalter</article-title>. <source>Radiologie (Heidelberg Germany)</source>. (<year>2024</year>) <volume>64</volume>:<fpage>45</fpage>&#x2013;<lpage>53</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00117-023-01249-x</pub-id>, PMID: <pub-id pub-id-type="pmid">38180539</pub-id>
</mixed-citation>
</ref>
<ref id="B11">
<label>11</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Langdon</surname> <given-names>J</given-names></name>
<name><surname>Sharbidre</surname> <given-names>K</given-names></name>
<name><surname>Garner</surname> <given-names>MS</given-names></name>
<name><surname>Robbin</surname> <given-names>M</given-names></name>
<name><surname>Scoutt</surname> <given-names>LM</given-names></name>
</person-group>. 
<article-title>Renal transplant ultrasound: assessment of complications and advanced applications</article-title>. <source>Abdominal Radiol</source>. (<year>2025</year>) <volume>50</volume>:<page-range>2558&#x2013;85</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00261-024-04731-9</pub-id>, PMID: <pub-id pub-id-type="pmid">39643733</pub-id>
</mixed-citation>
</ref>
<ref id="B12">
<label>12</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ng</surname> <given-names>KH</given-names></name>
<name><surname>Wong</surname> <given-names>JHD</given-names></name>
<name><surname>Leong</surname> <given-names>SS</given-names></name>
</person-group>. 
<article-title>Shear wave elastography in chronic kidney disease - the physics and clinical application</article-title>. <source>Phys Eng Sci Med</source>. (<year>2024</year>) <volume>47</volume>:<fpage>17</fpage>&#x2013;<lpage>29</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s13246-023-01358-w</pub-id>, PMID: <pub-id pub-id-type="pmid">38078996</pub-id>
</mixed-citation>
</ref>
<ref id="B13">
<label>13</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shi</surname> <given-names>LQ</given-names></name>
<name><surname>Sun</surname> <given-names>J</given-names></name>
<name><surname>Yuan</surname> <given-names>L</given-names></name>
<name><surname>Wang</surname> <given-names>XW</given-names></name>
<name><surname>Li</surname> <given-names>W</given-names></name>
</person-group>. 
<article-title>Diagnostic performance of renal cortical elasticity by supersonic shear wave imaging in pediatric glomerular disease</article-title>. <source>Eur J Radiol</source>. (<year>2023</year>) <volume>168</volume>:<fpage>111113</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ejrad.2023.111113</pub-id>, PMID: <pub-id pub-id-type="pmid">37820521</pub-id>
</mixed-citation>
</ref>
<ref id="B14">
<label>14</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kelly</surname> <given-names>BC</given-names></name>
<name><surname>Fung</surname> <given-names>R</given-names></name>
<name><surname>Fung</surname> <given-names>C</given-names></name>
</person-group>. 
<article-title>Risk stratification framework to improve the utility of renal ultrasound in acute kidney injury</article-title>. <source>SA J Radiol</source>. (<year>2024</year>) <volume>28</volume>:<elocation-id>2889</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.4102/sajr.v28i1.2889</pub-id>, PMID: <pub-id pub-id-type="pmid">39114743</pub-id>
</mixed-citation>
</ref>
<ref id="B15">
<label>15</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>De Jesus-Rodriguez</surname> <given-names>HJ</given-names></name>
<name><surname>Morgan</surname> <given-names>MA</given-names></name>
<name><surname>Sagreiya</surname> <given-names>H</given-names></name>
</person-group>. 
<article-title>Deep learning in kidney ultrasound: overview, frontiers, and challenges</article-title>. <source>Adv Chronic Kidney Dis</source>. (<year>2021</year>) <volume>28</volume>:<page-range>262&#x2013;9</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1053/j.ackd.2021.07.004</pub-id>, PMID: <pub-id pub-id-type="pmid">34906311</pub-id>
</mixed-citation>
</ref>
<ref id="B16">
<label>16</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Cai</surname> <given-names>L</given-names></name>
<name><surname>Pfob</surname> <given-names>A</given-names></name>
</person-group>. 
<article-title>Artificial intelligence in abdominal and pelvic ultrasound imaging: current applications</article-title>. <source>Abdominal Radiol</source>. (<year>2025</year>) <volume>50</volume>:<page-range>1775&#x2013;89</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00261-024-04640-x</pub-id>, PMID: <pub-id pub-id-type="pmid">39487919</pub-id>
</mixed-citation>
</ref>
<ref id="B17">
<label>17</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>McDonald</surname> <given-names>R</given-names></name>
<name><surname>Watchorn</surname> <given-names>J</given-names></name>
<name><surname>Hutchings</surname> <given-names>S</given-names></name>
</person-group>. 
<article-title>New ultrasound techniques for acute kidney injury diagnostics</article-title>. <source>Curr Opin Crit Care</source>. (<year>2024</year>) <volume>30</volume>:<page-range>571&#x2013;6</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1097/MCC.0000000000001216</pub-id>, PMID: <pub-id pub-id-type="pmid">39503207</pub-id>
</mixed-citation>
</ref>
<ref id="B18">
<label>18</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jia</surname> <given-names>J</given-names></name>
<name><surname>Wang</surname> <given-names>B</given-names></name>
<name><surname>Wang</surname> <given-names>Y</given-names></name>
<name><surname>Han</surname> <given-names>Y</given-names></name>
</person-group>. 
<article-title>Application of ultrasound in early prediction of delayed graft function after renal transplantation</article-title>. <source>Abdominal Radiol</source>. (<year>2024</year>) <volume>49</volume>:<page-range>3548&#x2013;58</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00261-024-04353-1</pub-id>, PMID: <pub-id pub-id-type="pmid">38760530</pub-id>
</mixed-citation>
</ref>
<ref id="B19">
<label>19</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yan</surname> <given-names>L</given-names></name>
<name><surname>Li</surname> <given-names>Q</given-names></name>
<name><surname>Fu</surname> <given-names>K</given-names></name>
<name><surname>Zhou</surname> <given-names>X</given-names></name>
<name><surname>Zhang</surname> <given-names>K</given-names></name>
</person-group>. 
<article-title>Progress in the application of artificial intelligence in ultrasound-assisted medical diagnosis</article-title>. <source>Bioengineering</source>. (<year>2025</year>) <volume>12</volume>:<elocation-id>288</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/bioengineering12030288</pub-id>, PMID: <pub-id pub-id-type="pmid">40150752</pub-id>
</mixed-citation>
</ref>
<ref id="B20">
<label>20</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xu</surname> <given-names>T</given-names></name>
<name><surname>Zhang</surname> <given-names>XY</given-names></name>
<name><surname>Yang</surname> <given-names>N</given-names></name>
<name><surname>Jiang</surname> <given-names>F</given-names></name>
<name><surname>Chen</surname> <given-names>GQ</given-names></name>
<name><surname>Pan</surname> <given-names>X</given-names></name>
<etal/>
</person-group>. 
<article-title>A narrative review on the application of artificial intelligence in renal ultrasound</article-title>. <source>Front Oncol</source>. (<year>2024</year>) <volume>13</volume>:<elocation-id>1252630</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fonc.2023.1252630</pub-id>, PMID: <pub-id pub-id-type="pmid">38495082</pub-id>
</mixed-citation>
</ref>
<ref id="B21">
<label>21</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Niyyar</surname> <given-names>VD</given-names></name>
<name><surname>Ross</surname> <given-names>DW</given-names></name>
<name><surname>O'Neill</surname> <given-names>WC</given-names></name>
</person-group>. 
<article-title>Performance and interpretation of sonography in the practice of nephrology: core curriculum 2024</article-title>. <source>Am J Kidney Dis</source>. (<year>2024</year>) <volume>83</volume>:<page-range>531&#x2013;45</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1053/j.ajkd.2023.09.006</pub-id>, PMID: <pub-id pub-id-type="pmid">38108672</pub-id>
</mixed-citation>
</ref>
<ref id="B22">
<label>22</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xu</surname> <given-names>M</given-names></name>
<name><surname>Guo</surname> <given-names>X</given-names></name>
<name><surname>Chen</surname> <given-names>X</given-names></name>
<name><surname>Wu</surname> <given-names>Y</given-names></name>
<name><surname>Huang</surname> <given-names>X</given-names></name>
<name><surname>Li</surname> <given-names>X</given-names></name>
<etal/>
</person-group>. 
<article-title>Noninvasive assessment of pediatric glomerular disease: multimodal ultrasound</article-title>. <source>Quantitative Imaging Med Surg</source>. (<year>2025</year>) <volume>15</volume>:<fpage>15</fpage>&#x2013;<lpage>29</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.21037/qims-24-1126</pub-id>, PMID: <pub-id pub-id-type="pmid">39839045</pub-id>
</mixed-citation>
</ref>
<ref id="B23">
<label>23</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Pak</surname> <given-names>S</given-names></name>
<name><surname>Park</surname> <given-names>SG</given-names></name>
<name><surname>Park</surname> <given-names>J</given-names></name>
<name><surname>Cho</surname> <given-names>ST</given-names></name>
<name><surname>Lee</surname> <given-names>YG</given-names></name>
<name><surname>Ahn</surname> <given-names>H</given-names></name>
<etal/>
</person-group>. 
<article-title>Applications of artificial intelligence in urologic oncology</article-title>. <source>Invest Clin Urol</source>. (<year>2024</year>) <volume>65</volume>:<page-range>202&#x2013;16</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.4111/icu.20230435</pub-id>, PMID: <pub-id pub-id-type="pmid">38714511</pub-id>
</mixed-citation>
</ref>
<ref id="B24">
<label>24</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>J</given-names></name>
<name><surname>Xi</surname> <given-names>C</given-names></name>
<name><surname>Dai</surname> <given-names>H</given-names></name>
<name><surname>Wang</surname> <given-names>J</given-names></name>
<name><surname>Lv</surname> <given-names>Y</given-names></name>
<name><surname>Zhang</surname> <given-names>P</given-names></name>
<etal/>
</person-group>. 
<article-title>Enhanced PET imaging using progressive conditional deep image prior</article-title>. <source>Phys Med Biol</source>. (<year>2023</year>) <volume>68</volume>:<fpage>175047</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1088/1361-6560/acf091</pub-id>, PMID: <pub-id pub-id-type="pmid">37582392</pub-id>
</mixed-citation>
</ref>
<ref id="B25">
<label>25</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bachnas</surname> <given-names>MA</given-names></name>
<name><surname>Andonotopo</surname> <given-names>W</given-names></name>
<name><surname>Dewantiningrum</surname> <given-names>J</given-names></name>
<name><surname>Adi Pramono</surname> <given-names>MB</given-names></name>
<name><surname>Stanojevic</surname> <given-names>M</given-names></name>
<name><surname>Kurjak</surname> <given-names>A</given-names></name>
<etal/>
</person-group>. 
<article-title>The utilization of artificial intelligence in enhancing 3D/4D ultrasound analysis of fetal facial profiles</article-title>. <source>J Perinatal Med</source>. (<year>2024</year>) <volume>52</volume>:<fpage>899</fpage>&#x2013;<lpage>913</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1515/jpm-2024-0347</pub-id>, PMID: <pub-id pub-id-type="pmid">39383043</pub-id>
</mixed-citation>
</ref>
<ref id="B26">
<label>26</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Sabiri</surname> <given-names>B</given-names></name>
<name><surname>Khtira</surname> <given-names>A</given-names></name>
<name><surname>El Asri</surname> <given-names>B</given-names></name>
<name><surname>Rhanoui</surname> <given-names>M</given-names></name>
</person-group>. 
<article-title>Investigating contrastive pair learning&#x2019;s frontiers in supervised, semisupervised, and self-supervised learning</article-title>. <source>J Imaging</source>. (<year>2024</year>) <volume>10</volume>:<elocation-id>196</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/jimaging10080196</pub-id>, PMID: <pub-id pub-id-type="pmid">39194985</pub-id>
</mixed-citation>
</ref>
<ref id="B27">
<label>27</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Islam</surname> <given-names>U</given-names></name>
<name><surname>Al-Atawi</surname> <given-names>A</given-names></name>
<name><surname>Alwageed</surname> <given-names>HS</given-names></name>
<name><surname>Mehmood</surname> <given-names>G</given-names></name>
<name><surname>Khan</surname> <given-names>F</given-names></name>
<name><surname>Innab</surname> <given-names>N</given-names></name>
<etal/>
</person-group>. 
<article-title>Detection of renal cell hydronephrosis in ultrasound kidney images: a study on the efficacy of deep convolutional neural networks</article-title>. <source>PeerJ Comput Sci</source>. (<year>2024</year>) <volume>10</volume>:<fpage>e1797</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.7717/peerj-cs.1797</pub-id>, PMID: <pub-id pub-id-type="pmid">39669452</pub-id>
</mixed-citation>
</ref>
<ref id="B28">
<label>28</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ma</surname> <given-names>J</given-names></name>
<name><surname>Kong</surname> <given-names>D</given-names></name>
<name><surname>Wu</surname> <given-names>F</given-names></name>
<name><surname>Bao</surname> <given-names>L</given-names></name>
<name><surname>Yuan</surname> <given-names>J</given-names></name>
<name><surname>Liu</surname> <given-names>Y</given-names></name>
</person-group>. 
<article-title>Densely connected convolutional networks for ultrasound image based lesion segmentation</article-title>. <source>Comput Biol Med</source>. (<year>2024</year>) <volume>168</volume>:<elocation-id>107725</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compbiomed.2023.107725</pub-id>, PMID: <pub-id pub-id-type="pmid">38006827</pub-id>
</mixed-citation>
</ref>
<ref id="B29">
<label>29</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zebari</surname> <given-names>DA</given-names></name>
</person-group>. 
<article-title>Kidney disease segmentation and classification using firefly sigma seeker and magWeight rank techniques</article-title>. <source>Bioengineering</source>. (<year>2025</year>) <volume>12</volume>:<elocation-id>350</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/bioengineering12040350</pub-id>, PMID: <pub-id pub-id-type="pmid">40281710</pub-id>
</mixed-citation>
</ref>
<ref id="B30">
<label>30</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shen</surname> <given-names>Z</given-names></name>
<name><surname>Tang</surname> <given-names>C</given-names></name>
<name><surname>Xu</surname> <given-names>M</given-names></name>
<name><surname>Lei</surname> <given-names>Z</given-names></name>
</person-group>. 
<article-title>Removal of speckle noises from ultrasound images using parallel convolutional neural network</article-title>. <source>Circuits Systems Signal Process</source>. (<year>2023</year>) <volume>42</volume>:<page-range>5041&#x2013;64</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00034-023-02349-8</pub-id>
</mixed-citation>
</ref>
<ref id="B31">
<label>31</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhou</surname> <given-names>Z</given-names></name>
<name><surname>Siddiquee</surname> <given-names>MMR</given-names></name>
<name><surname>Tajbakhsh</surname> <given-names>N</given-names></name>
<name><surname>Liang</surname> <given-names>J</given-names></name>
</person-group>. 
<article-title>UNet++: A nested U-net architecture for medical image segmentation</article-title>. <source>Deep Learn Med Image Anal Multimodal Learn Clin Decision Support: 4th Int Workshop</source>. (<year>2018</year>) <volume>11045</volume>:<fpage>3</fpage>&#x2013;<lpage>11</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/978-3-030-00889-5_1</pub-id>, PMID: <pub-id pub-id-type="pmid">32613207</pub-id>
</mixed-citation>
</ref>
<ref id="B32">
<label>32</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rajan</surname> <given-names>K</given-names></name>
<name><surname>Zielesny</surname> <given-names>A</given-names></name>
<name><surname>Steinbeck</surname> <given-names>C</given-names></name>
</person-group>. 
<article-title>DECIMER: toward deep learning for chemical image recognition</article-title>. <source>J Cheminformatics</source>. (<year>2020</year>) <volume>12</volume>:<fpage>65</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13321-020-00469-w</pub-id>, PMID: <pub-id pub-id-type="pmid">33372621</pub-id>
</mixed-citation>
</ref>
<ref id="B33">
<label>33</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Oliveira</surname> <given-names>DA</given-names></name>
<name><surname>Bresolin</surname> <given-names>T</given-names></name>
<name><surname>Coelho</surname> <given-names>SG</given-names></name>
<name><surname>Campos</surname> <given-names>MM</given-names></name>
<name><surname>Lage</surname> <given-names>CFA</given-names></name>
<name><surname>Le&#xe3;o</surname> <given-names>JM</given-names></name>
<etal/>
</person-group>. 
<article-title>A polar transformation augmentation approach for enhancing mammary gland segmentation in ultrasound images</article-title>. <source>Comput Electron Agric</source>. (<year>2024</year>) <volume>220</volume>:<elocation-id>108825</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.108825</pub-id>
</mixed-citation>
</ref>
<ref id="B34">
<label>34</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Goel</surname> <given-names>P</given-names></name>
<name><surname>Ganatra</surname> <given-names>A</given-names></name>
</person-group>. 
<article-title>Unsupervised domain adaptation for image classification and object detection using guided transfer learning approach and JS divergence</article-title>. <source>Sensors</source>. (<year>2023</year>) <volume>23</volume>:<elocation-id>4436</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s23094436</pub-id>, PMID: <pub-id pub-id-type="pmid">37177640</pub-id>
</mixed-citation>
</ref>
<ref id="B35">
<label>35</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Guo</surname> <given-names>S</given-names></name>
<name><surname>Chen</surname> <given-names>H</given-names></name>
<name><surname>Sheng</surname> <given-names>X</given-names></name>
<name><surname>Xiong</surname> <given-names>Y</given-names></name>
<name><surname>Wu</surname> <given-names>M</given-names></name>
<name><surname>Fischer</surname> <given-names>K</given-names></name>
<etal/>
</person-group>. 
<article-title>Cross-modal transfer learning based on an improved cycleGAN model for accurate kidney segmentation in ultrasound images</article-title>. <source>Ultrasound Med Biol</source>. (<year>2024</year>) <volume>50</volume>:<page-range>1638&#x2013;45</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ultrasmedbio.2024.06.009</pub-id>, PMID: <pub-id pub-id-type="pmid">39181806</pub-id>
</mixed-citation>
</ref>
<ref id="B36">
<label>36</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liu</surname> <given-names>Y</given-names></name>
<name><surname>Zhao</surname> <given-names>Y</given-names></name>
<name><surname>Xiao</surname> <given-names>Z</given-names></name>
<name><surname>Geng</surname> <given-names>L</given-names></name>
<name><surname>Xiao</surname> <given-names>Z</given-names></name>
</person-group>. 
<article-title>Multiscale subgraph adversarial contrastive learning</article-title>. <source>IEEE Trans Neural Networks Learn Syst</source>. (<year>2025</year>) <volume>36</volume>:<page-range>15001&#x2013;14</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TNNLS.2025.3543954</pub-id>, PMID: <pub-id pub-id-type="pmid">40117156</pub-id>
</mixed-citation>
</ref>
<ref id="B37">
<label>37</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Marsousi</surname> <given-names>M</given-names></name>
<name><surname>Plataniotis</surname> <given-names>KN</given-names></name>
<name><surname>Stergiopoulos</surname> <given-names>S</given-names></name>
</person-group>. 
<article-title>Kidney detection in 3-D ultrasound imagery via shape-to-volume registration based on spatially aligned neural network</article-title>. <source>IEEE J Biomed Health Inf</source>. (<year>2019</year>) <volume>23</volume>:<page-range>227&#x2013;42</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/JBHI.2018.2805777</pub-id>, PMID: <pub-id pub-id-type="pmid">29993823</pub-id>
</mixed-citation>
</ref>
<ref id="B38">
<label>38</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Peng</surname> <given-names>T</given-names></name>
<name><surname>Gu</surname> <given-names>Y</given-names></name>
<name><surname>Ruan</surname> <given-names>SJ</given-names></name>
<name><surname>Wu</surname> <given-names>QJ</given-names></name>
<name><surname>Cai</surname> <given-names>J</given-names></name>
</person-group>. 
<article-title>Novel solution for using neural networks for kidney boundary extraction in 2D ultrasound data</article-title>. <source>Biomolecules</source>. (<year>2023</year>) <volume>13</volume>:<elocation-id>1548</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/biom13101548</pub-id>, PMID: <pub-id pub-id-type="pmid">37892229</pub-id>
</mixed-citation>
</ref>
<ref id="B39">
<label>39</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Khaledyan</surname> <given-names>D</given-names></name>
<name><surname>Marini</surname> <given-names>TJ</given-names></name>
<name><surname>O'Connell</surname> <given-names>A</given-names></name>
<name><surname>Meng</surname> <given-names>S</given-names></name>
<name><surname>Kan</surname> <given-names>J</given-names></name>
<name><surname>Brennan</surname> <given-names>G</given-names></name>
<etal/>
</person-group>. 
<article-title>WATUNet: a deep neural network for segmentation of volumetric sweep imaging ultrasound</article-title>. <source>Mach Learning: Sci Technol</source>. (<year>2024</year>) <volume>5</volume>:<fpage>015042</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1088/2632-2153/ad2e15</pub-id>, PMID: <pub-id pub-id-type="pmid">38464559</pub-id>
</mixed-citation>
</ref>
<ref id="B40">
<label>40</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Singla</surname> <given-names>R</given-names></name>
<name><surname>Ringstrom</surname> <given-names>C</given-names></name>
<name><surname>Hu</surname> <given-names>R</given-names></name>
<name><surname>Hu</surname> <given-names>Z</given-names></name>
<name><surname>Lessoway</surname> <given-names>V</given-names></name>
<name><surname>Reid</surname> <given-names>J</given-names></name>
<etal/>
</person-group>. 
<article-title>Automatic measurement of kidney dimensions in two-dimensional ultrasonography is comparable to expert sonographers</article-title>. <source>J Med Imaging</source>. (<year>2023</year>) <volume>10</volume>:<elocation-id>34003</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1117/1.JMI.10.3.034003</pub-id>, PMID: <pub-id pub-id-type="pmid">37304526</pub-id>
</mixed-citation>
</ref>
<ref id="B41">
<label>41</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Khan</surname> <given-names>R</given-names></name>
<name><surname>Xiao</surname> <given-names>C</given-names></name>
<name><surname>Liu</surname> <given-names>Y</given-names></name>
<name><surname>Tian</surname> <given-names>J</given-names></name>
<name><surname>Chen</surname> <given-names>Z</given-names></name>
<name><surname>Su</surname> <given-names>L</given-names></name>
<etal/>
</person-group>. 
<article-title>Transformative deep neural network approaches in kidney ultrasound segmentation: empirical validation with an annotated dataset</article-title>. <source>Interdiscip Sci Comput Life Sci</source>. (<year>2024</year>) <volume>16</volume>:<page-range>439&#x2013;54</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s12539-024-00620-3</pub-id>, PMID: <pub-id pub-id-type="pmid">38413547</pub-id>
</mixed-citation>
</ref>
<ref id="B42">
<label>42</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Guo</surname> <given-names>J</given-names></name>
<name><surname>Odu</surname> <given-names>A</given-names></name>
<name><surname>Pedrosa</surname> <given-names>I</given-names></name>
</person-group>. 
<article-title>Deep learning kidney segmentation with very limited training data using a cascaded convolution neural network</article-title>. <source>PloS One</source>. (<year>2022</year>) <volume>17</volume>:<fpage>e0267753</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0267753</pub-id>, PMID: <pub-id pub-id-type="pmid">35533181</pub-id>
</mixed-citation>
</ref>
<ref id="B43">
<label>43</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yu</surname> <given-names>Z</given-names></name>
<name><surname>Zhao</surname> <given-names>T</given-names></name>
<name><surname>Xi</surname> <given-names>Z</given-names></name>
<name><surname>Zhang</surname> <given-names>Y</given-names></name>
<name><surname>Zhang</surname> <given-names>X</given-names></name>
<name><surname>Wang</surname> <given-names>X</given-names></name>
<etal/>
</person-group>. 
<article-title>Using CT images to assist the segmentation of MR images via generalization: Segmentation of the renal parenchyma of renal carcinoma patients</article-title>. <source>Med Phys</source>. (<year>2025</year>) <volume>52</volume>:<page-range>951&#x2013;64</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/mp.17494</pub-id>, PMID: <pub-id pub-id-type="pmid">39494916</pub-id>
</mixed-citation>
</ref>
<ref id="B44">
<label>44</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ghaith</surname> <given-names>N</given-names></name>
<name><surname>Malaeb</surname> <given-names>B</given-names></name>
<name><surname>Itani</surname> <given-names>R</given-names></name>
<name><surname>Alnafea</surname> <given-names>M</given-names></name>
<name><surname>Al Faraj</surname> <given-names>A</given-names></name>
</person-group>. 
<article-title>Correlation of kidney size on computed tomography with GFR, creatinine and hbA1C for an accurate diagnosis of patients with diabetes and/or chronic kidney disease</article-title>. <source>Diagnostics</source>. (<year>2021</year>) <volume>11</volume>:<elocation-id>789</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/diagnostics11050789</pub-id>, PMID: <pub-id pub-id-type="pmid">33925666</pub-id>
</mixed-citation>
</ref>
<ref id="B45">
<label>45</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Fernandez</surname> <given-names>JM</given-names></name>
<name><surname>Hernandez-Socorro</surname> <given-names>CR</given-names></name>
<name><surname>Robador</surname> <given-names>LO</given-names></name>
<name><surname>Rodr&#xed;guez-Esparrag&#xf3;n</surname> <given-names>F</given-names></name>
<name><surname>Medina-Garc&#xed;a</surname> <given-names>D</given-names></name>
<name><surname>Quevedo-Reina</surname> <given-names>JC</given-names></name>
<etal/>
</person-group>. 
<article-title>Ultrasound versus magnetic resonance imaging for calculating total kidney volume in patients with ADPKD: a real-world data analysis</article-title>. <source>Ultrasound J</source>. (<year>2025</year>) <volume>17</volume>:<elocation-id>13</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13089-025-00400-0</pub-id>, PMID: <pub-id pub-id-type="pmid">39934453</pub-id>
</mixed-citation>
</ref>
<ref id="B46">
<label>46</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Silva</surname> <given-names>F</given-names></name>
<name><surname>Malheiro</surname> <given-names>J</given-names></name>
<name><surname>Pestana</surname> <given-names>N</given-names></name>
<name><surname>Ribeiro</surname> <given-names>C</given-names></name>
<name><surname>Nunes-Carneiro</surname> <given-names>D</given-names></name>
<name><surname>Mandanelo</surname> <given-names>M</given-names></name>
<etal/>
</person-group>. 
<article-title>Lower donated kidney volume is associated with increased risk of lower graft function and acute rejection at 1 year after living donor kidney-a retrospective study</article-title>. <source>Transplant Int</source>. (<year>2020</year>) <volume>33</volume>:<page-range>1711&#x2013;22</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/tri.13740</pub-id>, PMID: <pub-id pub-id-type="pmid">32910834</pub-id>
</mixed-citation>
</ref>
<ref id="B47">
<label>47</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Khosravi</surname> <given-names>M</given-names></name>
<name><surname>Mokhtari</surname> <given-names>G</given-names></name>
<name><surname>Ramezanzade</surname> <given-names>E</given-names></name>
<name><surname>Yazdanipour</surname> <given-names>MA</given-names></name>
<name><surname>Monfared</surname> <given-names>A</given-names></name>
<name><surname>Haghighi</surname> <given-names>H</given-names></name>
<etal/>
</person-group>. 
<article-title>Relationship between donated kidney volume determined by ultrasound adjusted for clinical factors and 1-month and 1-year creatinine clearance: A retrospective study</article-title>. <source>Clin Nephrol</source>. (<year>2023</year>) <volume>99</volume>:<fpage>1</fpage>&#x2013;<lpage>10</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5414/CN110964</pub-id>, PMID: <pub-id pub-id-type="pmid">36331020</pub-id>
</mixed-citation>
</ref>
<ref id="B48">
<label>48</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Al Salmi</surname> <given-names>I</given-names></name>
<name><surname>Al Hajriy</surname> <given-names>M</given-names></name>
<name><surname>Hannawi</surname> <given-names>S</given-names></name>
</person-group>. 
<article-title>Ultrasound measurement and kidney development: a mini-review for nephrologists</article-title>. <source>Saudi J Kidney Dis Transplant</source>. (<year>2021</year>) <volume>32</volume>:<page-range>174&#x2013;82</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.4103/1319-2442.318520</pub-id>, PMID: <pub-id pub-id-type="pmid">34145128</pub-id>
</mixed-citation>
</ref>
<ref id="B49">
<label>49</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Cai</surname> <given-names>L</given-names></name>
<name><surname>Li</surname> <given-names>Q</given-names></name>
<name><surname>Zhang</surname> <given-names>J</given-names></name>
<name><surname>Zhang</surname> <given-names>Z</given-names></name>
<name><surname>Yang</surname> <given-names>R</given-names></name>
<name><surname>Zhang</surname> <given-names>L</given-names></name>
<etal/>
</person-group>. 
<article-title>Ultrasound image segmentation based on Transformer and U-Net with joint loss</article-title>. <source>PeerJ Comput Sci</source>. (<year>2023</year>) <volume>9</volume>:<fpage>e1638</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.7717/peerj-cs.1638</pub-id>, PMID: <pub-id pub-id-type="pmid">38077559</pub-id>
</mixed-citation>
</ref>
<ref id="B50">
<label>50</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>WB</given-names></name>
<name><surname>Zhou</surname> <given-names>P</given-names></name>
<name><surname>Chen</surname> <given-names>Y</given-names></name>
<name><surname>Zhou</surname> <given-names>GQ</given-names></name>
</person-group>. 
<article-title>Frequency-phase guided attention complex-valued network for ultrasound image segmentation</article-title>. <source>IEEE J Biomed Health Inf</source>. (<year>2025</year>) <volume>29</volume>:<page-range>5773&#x2013;86</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/JBHI.2025.3565311</pub-id>, PMID: <pub-id pub-id-type="pmid">40299743</pub-id>
</mixed-citation>
</ref>
<ref id="B51">
<label>51</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xiao</surname> <given-names>X</given-names></name>
<name><surname>Zhang</surname> <given-names>J</given-names></name>
<name><surname>Shao</surname> <given-names>Y</given-names></name>
<name><surname>Liu</surname> <given-names>J</given-names></name>
<name><surname>Shi</surname> <given-names>K</given-names></name>
<name><surname>He</surname> <given-names>C</given-names></name>
<etal/>
</person-group>. 
<article-title>Deep learning-based medical ultrasound image and video segmentation methods: overview, frontiers, and challenges</article-title>. <source>Sensors</source>. (<year>2025</year>) <volume>25</volume>:<elocation-id>2361</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s25082361</pub-id>, PMID: <pub-id pub-id-type="pmid">40285051</pub-id>
</mixed-citation>
</ref>
<ref id="B52">
<label>52</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Song</surname> <given-names>Y</given-names></name>
<name><surname>Zheng</surname> <given-names>J</given-names></name>
<name><surname>Lei</surname> <given-names>L</given-names></name>
<name><surname>Ni</surname> <given-names>Z</given-names></name>
<name><surname>Zhao</surname> <given-names>B</given-names></name>
<name><surname>Hu</surname> <given-names>Y</given-names></name>
<etal/>
</person-group>. 
<article-title>CT2US: Cross-modal transfer learning for kidney segmentation in ultrasound images with synthesized data</article-title>. <source>Ultrasonics</source>. (<year>2022</year>) <volume>122</volume>:<elocation-id>106706</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ultras.2022.106706</pub-id>, PMID: <pub-id pub-id-type="pmid">35149255</pub-id>
</mixed-citation>
</ref>
<ref id="B53">
<label>53</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chen</surname> <given-names>G</given-names></name>
<name><surname>Yin</surname> <given-names>J</given-names></name>
<name><surname>Dai</surname> <given-names>Y</given-names></name>
<name><surname>Zhang</surname> <given-names>J</given-names></name>
<name><surname>Yin</surname> <given-names>X</given-names></name>
<name><surname>Cui</surname> <given-names>L</given-names></name>
<etal/>
</person-group>. 
<article-title>A novel convolutional neural network for kidney ultrasound images segmentation</article-title>. <source>Comput Methods Programs Biomed</source>. (<year>2022</year>) <volume>218</volume>:<elocation-id>106712</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cmpb.2022.106712</pub-id>, PMID: <pub-id pub-id-type="pmid">35248816</pub-id>
</mixed-citation>
</ref>
<ref id="B54">
<label>54</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Khan</surname> <given-names>R</given-names></name>
<name><surname>Zaman</surname> <given-names>A</given-names></name>
<name><surname>Chen</surname> <given-names>C</given-names></name>
<name><surname>Xiao</surname> <given-names>C</given-names></name>
<name><surname>Zhong</surname> <given-names>W</given-names></name>
<name><surname>Liu</surname> <given-names>Y</given-names></name>
<etal/>
</person-group>. 
<article-title>MLAU-Net: Deep supervised attention and hybrid loss strategies for enhanced segmentation of low-resolution kidney ultrasound</article-title>. <source>Digital Health</source>. (<year>2024</year>) <volume>10</volume>:<elocation-id>20552076241291306</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1177/20552076241291306</pub-id>, PMID: <pub-id pub-id-type="pmid">39559387</pub-id>
</mixed-citation>
</ref>
<ref id="B55">
<label>55</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Alex</surname> <given-names>DM</given-names></name>
<name><surname>Abraham Chandy</surname> <given-names>D</given-names></name>
<name><surname>Hepzibah Christinal</surname> <given-names>A</given-names></name>
<name><surname>Arvinder</surname> <given-names>S</given-names></name>
<name><surname>Pushkaran</surname> <given-names>M</given-names></name>
</person-group>. 
<article-title>YSegNet: a novel deep learning network for kidney segmentation in 2D ultrasound images</article-title>. <source>Neural Computing Appl</source>. (<year>2022</year>) <volume>34</volume>:<page-range>22405&#x2013;16</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00521-022-07624-4</pub-id>
</mixed-citation>
</ref>
<ref id="B56">
<label>56</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Sharifzadeh</surname> <given-names>M</given-names></name>
<name><surname>Benali</surname> <given-names>H</given-names></name>
<name><surname>Rivaz</surname> <given-names>H</given-names></name>
</person-group>. 
<article-title>Investigating shift variance of convolutional neural networks in ultrasound image segmentation</article-title>. <source>IEEE Trans Ultrasonics Ferroelectrics Frequency Control</source>. (<year>2022</year>) <volume>69</volume>:<page-range>1703&#x2013;13</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/tuffc.2022.3162800</pub-id>, PMID: <pub-id pub-id-type="pmid">35344491</pub-id>
</mixed-citation>
</ref>
<ref id="B57">
<label>57</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Weerasinghe</surname> <given-names>NH</given-names></name>
<name><surname>Lovell</surname> <given-names>NH</given-names></name>
<name><surname>Welsh</surname> <given-names>AW</given-names></name>
<name><surname>Stevenson</surname> <given-names>GN</given-names></name>
</person-group>. 
<article-title>Multi-parametric fusion of 3D power doppler ultrasound for fetal kidney segmentation using fully convolutional neural networks</article-title>. <source>IEEE J Biomed Health Inf</source>. (<year>2021</year>) <volume>25</volume>:<page-range>2050&#x2013;7</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/JBHI.2020.3027318</pub-id>, PMID: <pub-id pub-id-type="pmid">32991292</pub-id>
</mixed-citation>
</ref>
<ref id="B58">
<label>58</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Guo</surname> <given-names>S</given-names></name>
<name><surname>Sheng</surname> <given-names>X</given-names></name>
<name><surname>Chen</surname> <given-names>H</given-names></name>
<name><surname>Zhang</surname> <given-names>J</given-names></name>
<name><surname>Peng</surname> <given-names>Q</given-names></name>
<name><surname>Wu</surname> <given-names>M</given-names></name>
<etal/>
</person-group>. 
<article-title>A novel cross-modal data augmentation method based on contrastive unpaired translation network for kidney segmentation in ultrasound imaging</article-title>. <source>Med Phys</source>. (<year>2025</year>) <volume>52</volume>:<page-range>3877&#x2013;87</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/mp.17663</pub-id>, PMID: <pub-id pub-id-type="pmid">39904615</pub-id>
</mixed-citation>
</ref>
<ref id="B59">
<label>59</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jagtap</surname> <given-names>JM</given-names></name>
<name><surname>Gregory</surname> <given-names>AV</given-names></name>
<name><surname>Homes</surname> <given-names>HL</given-names></name>
<name><surname>Wright</surname> <given-names>DE</given-names></name>
<name><surname>Edwards</surname> <given-names>ME</given-names></name>
<name><surname>Akkus</surname> <given-names>Z</given-names></name>
<etal/>
</person-group>. 
<article-title>Automated measurement of total kidney volume from 3D ultrasound images of patients affected by polycystic kidney disease and comparison to MR measurements</article-title>. <source>Abdominal Radiol</source>. (<year>2022</year>) <volume>47</volume>:<page-range>2408&#x2013;19</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00261-022-03521-5</pub-id>, PMID: <pub-id pub-id-type="pmid">35476147</pub-id>
</mixed-citation>
</ref>
<ref id="B60">
<label>60</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Oghli</surname> <given-names>MG</given-names></name>
<name><surname>Bagheri</surname> <given-names>SM</given-names></name>
<name><surname>Shabanzadeh</surname> <given-names>A</given-names></name>
<name><surname>Mehrjardi</surname> <given-names>MZ</given-names></name>
<name><surname>Akhavan</surname> <given-names>A</given-names></name>
<name><surname>Shiri</surname> <given-names>I</given-names></name>
<etal/>
</person-group>. 
<article-title>Fully automated kidney image biomarker prediction in ultrasound scans using Fast-Unet+</article-title>. <source>Sci Rep</source>. (<year>2024</year>) <volume>14</volume>:<fpage>4782</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-024-55106-5</pub-id>, PMID: <pub-id pub-id-type="pmid">38413748</pub-id>
</mixed-citation>
</ref>
<ref id="B61">
<label>61</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kim</surname> <given-names>DW</given-names></name>
<name><surname>Ahn</surname> <given-names>HG</given-names></name>
<name><surname>Kim</surname> <given-names>J</given-names></name>
<name><surname>Yoon</surname> <given-names>CS</given-names></name>
<name><surname>Kim</surname> <given-names>JH</given-names></name>
<name><surname>Yang</surname> <given-names>S</given-names></name>
</person-group>. 
<article-title>Advanced kidney volume measurement method using ultrasonography with artificial intelligence-based hybrid learning in children</article-title>. <source>Sensors</source>. (<year>2021</year>) <volume>21</volume>:<elocation-id>6846</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s21206846</pub-id>, PMID: <pub-id pub-id-type="pmid">34696057</pub-id>
</mixed-citation>
</ref>
<ref id="B62">
<label>62</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Esser</surname> <given-names>M</given-names></name>
<name><surname>Tsiflikas</surname> <given-names>I</given-names></name>
<name><surname>Jago</surname> <given-names>JR</given-names></name>
<name><surname>Rouet</surname> <given-names>L</given-names></name>
<name><surname>Stebner</surname> <given-names>A</given-names></name>
<name><surname>Sch&#xe4;fer</surname> <given-names>JF</given-names></name>
</person-group>. 
<article-title>Semiautomatic three-dimensional ultrasound renal volume segmentation in pediatric hydronephrosis: interrater agreement and correlation to conventional hydronephrosis grading</article-title>. <source>Pediatr Radiol</source>. (<year>2025</year>) <volume>55</volume>:<page-range>1298&#x2013;307</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00247-025-06249-8</pub-id>, PMID: <pub-id pub-id-type="pmid">40327095</pub-id>
</mixed-citation>
</ref>
<ref id="B63">
<label>63</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dev</surname> <given-names>H</given-names></name>
<name><surname>Zhu</surname> <given-names>C</given-names></name>
<name><surname>Sharbatdaran</surname> <given-names>A</given-names></name>
<name><surname>Raza</surname> <given-names>SI</given-names></name>
<name><surname>Wang</surname> <given-names>SJ</given-names></name>
<name><surname>Romano</surname> <given-names>DJ</given-names></name>
<etal/>
</person-group>. 
<article-title>Effect of averaging measurements from multiple MRI pulse sequences on kidney volume reproducibility in autosomal dominant polycystic kidney disease</article-title>. <source>J Magnetic Resonance Imaging</source>. (<year>2023</year>) <volume>58</volume>:<page-range>1153&#x2013;60</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/jmri.28593</pub-id>, PMID: <pub-id pub-id-type="pmid">36645114</pub-id>
</mixed-citation>
</ref>
<ref id="B64">
<label>64</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Akbari</surname> <given-names>P</given-names></name>
<name><surname>Nasri</surname> <given-names>F</given-names></name>
<name><surname>Deng</surname> <given-names>SX</given-names></name>
<name><surname>Khowaja</surname> <given-names>S</given-names></name>
<name><surname>Lee</surname> <given-names>SH</given-names></name>
<name><surname>Warnica</surname> <given-names>W</given-names></name>
<etal/>
</person-group>. 
<article-title>Total kidney volume measurements in ADPKD by 3D and ellipsoid ultrasound in comparison with magnetic resonance imaging</article-title>. <source>Clin J Am Soc Nephrol</source>. (<year>2022</year>) <volume>17</volume>:<page-range>827&#x2013;34</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.2215/CJN.14931121</pub-id>, PMID: <pub-id pub-id-type="pmid">35383043</pub-id>
</mixed-citation>
</ref>
<ref id="B65">
<label>65</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ma</surname> <given-names>F</given-names></name>
<name><surname>Sun</surname> <given-names>T</given-names></name>
<name><surname>Liu</surname> <given-names>L</given-names></name>
<name><surname>Jing</surname> <given-names>H</given-names></name>
</person-group>. 
<article-title>Detection and diagnosis of chronic kidney disease using deep learning-based heterogeneous modified artificial neural network</article-title>. <source>Future Generation Comput Syst</source>. (<year>2020</year>) <volume>111</volume>:<page-range>17&#x2013;26</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.future.2020.04.036</pub-id>
</mixed-citation>
</ref>
<ref id="B66">
<label>66</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Weaver</surname> <given-names>JK</given-names></name>
<name><surname>Milford</surname> <given-names>K</given-names></name>
<name><surname>Rickard</surname> <given-names>M</given-names></name>
<name><surname>Logan</surname> <given-names>J</given-names></name>
<name><surname>Erdman</surname> <given-names>L</given-names></name>
<name><surname>Viteri</surname> <given-names>B</given-names></name>
<etal/>
</person-group>. 
<article-title>Deep learning imaging features derived from kidney ultrasounds predict chronic kidney disease progression in children with posterior urethral valves</article-title>. <source>Pediatr Nephrol</source>. (<year>2023</year>) <volume>38</volume>:<page-range>839&#x2013;46</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00467-022-05677-0</pub-id>, PMID: <pub-id pub-id-type="pmid">35867160</pub-id>
</mixed-citation>
</ref>
<ref id="B67">
<label>67</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lee</surname> <given-names>M</given-names></name>
<name><surname>Wei</surname> <given-names>S</given-names></name>
<name><surname>Anaokar</surname> <given-names>J</given-names></name>
<name><surname>Uzzo</surname> <given-names>R</given-names></name>
<name><surname>Kutikov</surname> <given-names>A</given-names></name>
</person-group>. 
<article-title>Kidney cancer management 3.0: can artificial intelligence make us better</article-title>? <source>Curr Opin Urol</source>. (<year>2021</year>) <volume>31</volume>:<page-range>409&#x2013;15</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1097/MOU.0000000000000881</pub-id>, PMID: <pub-id pub-id-type="pmid">33882560</pub-id>
</mixed-citation>
</ref>
<ref id="B68">
<label>68</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Khalid</surname> <given-names>F</given-names></name>
<name><surname>Alsadoun</surname> <given-names>L</given-names></name>
<name><surname>Khilji</surname> <given-names>F</given-names></name>
<name><surname>Mushtaq</surname> <given-names>M</given-names></name>
<name><surname>Eze-Odurukwe</surname> <given-names>A</given-names></name>
<name><surname>Mushtaq</surname> <given-names>MM</given-names></name>
<etal/>
</person-group>. 
<article-title>Predicting the progression of chronic kidney disease: A systematic review of artificial intelligence and machine learning approaches</article-title>. <source>Cureus</source>. (<year>2024</year>) <volume>16</volume>:<fpage>e60145</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.7759/cureus.60145</pub-id>, PMID: <pub-id pub-id-type="pmid">38864072</pub-id>
</mixed-citation>
</ref>
<ref id="B69">
<label>69</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jhamb</surname> <given-names>M</given-names></name>
<name><surname>Weltman</surname> <given-names>MR</given-names></name>
<name><surname>Devaraj</surname> <given-names>SM</given-names></name>
<name><surname>Lavenburg</surname> <given-names>LU</given-names></name>
<name><surname>Han</surname> <given-names>Z</given-names></name>
<name><surname>Alghwiri</surname> <given-names>AA</given-names></name>
<etal/>
</person-group>. 
<article-title>Electronic health record population health management for chronic kidney disease care: A cluster randomized clinical trial</article-title>. <source>JAMA Internal Med</source>. (<year>2024</year>) <volume>184</volume>:<page-range>737&#x2013;47</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1001/jamainternmed.2024.0708</pub-id>, PMID: <pub-id pub-id-type="pmid">38619824</pub-id>
</mixed-citation>
</ref>
<ref id="B70">
<label>70</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Inaguma</surname> <given-names>D</given-names></name>
<name><surname>Kitagawa</surname> <given-names>A</given-names></name>
<name><surname>Yanagiya</surname> <given-names>R</given-names></name>
<name><surname>Koseki</surname> <given-names>A</given-names></name>
<name><surname>Iwamori</surname> <given-names>T</given-names></name>
<name><surname>Kudo</surname> <given-names>M</given-names></name>
<etal/>
</person-group>. 
<article-title>Increasing tendency of urine protein is a risk factor for rapid eGFR decline in patients with CKD: A machine learning-based prediction model by using a big database</article-title>. <source>PloS One</source>. (<year>2020</year>) <volume>15</volume>:<fpage>e0239262</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0239262</pub-id>, PMID: <pub-id pub-id-type="pmid">32941535</pub-id>
</mixed-citation>
</ref>
<ref id="B71">
<label>71</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ying</surname> <given-names>F</given-names></name>
<name><surname>Chen</surname> <given-names>S</given-names></name>
<name><surname>Pan</surname> <given-names>G</given-names></name>
<etal/>
</person-group>. 
<article-title>Artificial intelligence pulse coupled neural network algorithm in the diagnosis and treatment of severe sepsis complicated with acute kidney injury under ultrasound image</article-title>. <source>J Healthcare Eng</source>. (<year>2021</year>) <volume>2021</volume>:<fpage>6761364</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1155/2021/6761364</pub-id>, PMID: <pub-id pub-id-type="pmid">34336164</pub-id>
</mixed-citation>
</ref>
<ref id="B72">
<label>72</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Alqaissi</surname> <given-names>E</given-names></name>
<name><surname>Algarni</surname> <given-names>A</given-names></name>
<name><surname>Alshehri</surname> <given-names>M</given-names></name>
<name><surname>Alkhaldy</surname> <given-names>H</given-names></name>
<name><surname>Alshehri</surname> <given-names>A</given-names></name>
</person-group>. 
<article-title>A recursive embedding and clustering technique for unraveling asymptomatic kidney disease using laboratory data and machine learning</article-title>. <source>Sci Rep</source>. (<year>2025</year>) <volume>15</volume>:<fpage>5820</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-025-89499-8</pub-id>, PMID: <pub-id pub-id-type="pmid">39962186</pub-id>
</mixed-citation>
</ref>
<ref id="B73">
<label>73</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chen</surname> <given-names>Z</given-names></name>
<name><surname>Chen</surname> <given-names>J</given-names></name>
<name><surname>Ying</surname> <given-names>TC</given-names></name>
<name><surname>Chen</surname> <given-names>H</given-names></name>
<name><surname>Wu</surname> <given-names>C</given-names></name>
<name><surname>Chen</surname> <given-names>X</given-names></name>
<etal/>
</person-group>. 
<article-title>Development and deployment of a novel diagnostic tool based on conventional ultrasound for fibrosis assessment in chronic kidney disease</article-title>. <source>Acad Radiol</source>. (<year>2023</year>) <volume>30</volume>:<page-range>S295&#x2013;304</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.acra.2023.02.018</pub-id>, PMID: <pub-id pub-id-type="pmid">36973117</pub-id>
</mixed-citation>
</ref>
<ref id="B74">
<label>74</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Delrue</surname> <given-names>C</given-names></name>
<name><surname>De Bruyne</surname> <given-names>S</given-names></name>
<name><surname>Speeckaert</surname> <given-names>MM</given-names></name>
</person-group>. 
<article-title>Application of machine learning in chronic kidney disease: current status and future prospects</article-title>. <source>Biomedicines</source>. (<year>2024</year>) <volume>12</volume>:<elocation-id>568</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/biomedicines12030568</pub-id>, PMID: <pub-id pub-id-type="pmid">38540181</pub-id>
</mixed-citation>
</ref>
<ref id="B75">
<label>75</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Molina-Moreno</surname> <given-names>M</given-names></name>
<name><surname>Gonzalez-Diaz</surname> <given-names>I</given-names></name>
<name><surname>Rivera Gorrin</surname> <given-names>M</given-names></name>
<name><surname>Burguera Vion</surname> <given-names>V</given-names></name>
<name><surname>D&#xed;az-de-Mar&#xed;a</surname> <given-names>F</given-names></name>
</person-group>. 
<article-title>URI-CADS: A fully automated computer-aided diagnosis system for ultrasound renal imaging</article-title>. <source>J Imaging Inf Med</source>. (<year>2024</year>) <volume>37</volume>:<page-range>1458&#x2013;74</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s10278-024-01055-4</pub-id>, PMID: <pub-id pub-id-type="pmid">38413459</pub-id>
</mixed-citation>
</ref>
<ref id="B76">
<label>76</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gogoi</surname> <given-names>P</given-names></name>
<name><surname>Valan</surname> <given-names>JA</given-names></name>
</person-group>. 
<article-title>Machine learning approaches for predicting and diagnosing chronic kidney disease: current trends, challenges, solutions, and future directions</article-title>. <source>Int Urol Nephrol</source>. (<year>2025</year>) <volume>57</volume>:<page-range>1245&#x2013;68</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11255-024-04281-5</pub-id>, PMID: <pub-id pub-id-type="pmid">39560857</pub-id>
</mixed-citation>
</ref>
<ref id="B77">
<label>77</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shi</surname> <given-names>S</given-names></name>
</person-group>. 
<article-title>A novel hybrid deep learning architecture for predicting acute kidney injury using patient record data and ultrasound kidney images</article-title>. <source>Appl Artif Intell</source>. (<year>2021</year>) <volume>35</volume>:<page-range>1329&#x2013;45</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/08839514.2021.1976908</pub-id>
</mixed-citation>
</ref>
<ref id="B78">
<label>78</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jeong</surname> <given-names>I</given-names></name>
<name><surname>Cho</surname> <given-names>NJ</given-names></name>
<name><surname>Ahn</surname> <given-names>SJ</given-names></name>
<name><surname>Lee</surname> <given-names>H</given-names></name>
<name><surname>Gil</surname> <given-names>HW</given-names></name>
</person-group>. 
<article-title>Machine learning approaches toward an understanding of acute kidney injury: current trends and future directions</article-title>. <source>Korean J Internal Med</source>. (<year>2024</year>) <volume>39</volume>:<page-range>882&#x2013;97</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.3904/kjim.2024.098</pub-id>, PMID: <pub-id pub-id-type="pmid">39468926</pub-id>
</mixed-citation>
</ref>
<ref id="B79">
<label>79</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xu</surname> <given-names>Q</given-names></name>
<name><surname>Qiang</surname> <given-names>B</given-names></name>
<name><surname>Pan</surname> <given-names>Y</given-names></name>
<name><surname>Li</surname> <given-names>J</given-names></name>
</person-group>. 
<article-title>Alteration in shear wave elastography is associated with acute kidney injury: a prospective observational pilot study</article-title>. <source>Shock</source>. (<year>2023</year>) <volume>59</volume>:<page-range>375&#x2013;84</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1097/SHK.0000000000002070</pub-id>, PMID: <pub-id pub-id-type="pmid">36567550</pub-id>
</mixed-citation>
</ref>
<ref id="B80">
<label>80</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>Y</given-names></name>
<name><surname>Xu</surname> <given-names>F</given-names></name>
<name><surname>Han</surname> <given-names>Q</given-names></name>
<name><surname>Geng</surname> <given-names>D</given-names></name>
<name><surname>Gao</surname> <given-names>X</given-names></name>
<name><surname>Xu</surname> <given-names>B</given-names></name>
<etal/>
</person-group>. 
<article-title>AI-based automatic estimation of single-kidney glomerular filtration rate and split renal function using noncontrast CT</article-title>. <source>Insights into Imaging</source>. (<year>2025</year>) <volume>16</volume>:<fpage>84</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13244-025-01959-x</pub-id>, PMID: <pub-id pub-id-type="pmid">40192862</pub-id>
</mixed-citation>
</ref>
<ref id="B81">
<label>81</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Chen</surname> <given-names>Z</given-names></name>
<name><surname>Ying</surname> <given-names>MTC</given-names></name>
<name><surname>Wang</surname> <given-names>Y</given-names></name>
<name><surname>Chen</surname> <given-names>J</given-names></name>
<name><surname>Wu</surname> <given-names>C</given-names></name>
<name><surname>Han</surname> <given-names>X</given-names></name>
<etal/>
</person-group>. 
<article-title>Ultrasound-based radiomics analysis in the assessment of renal fibrosis in patients with chronic kidney disease</article-title>. <source>Abdominal Radiol</source>. (<year>2023</year>) <volume>48</volume>:<page-range>2649&#x2013;57</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00261-023-03965-3</pub-id>, PMID: <pub-id pub-id-type="pmid">37256330</pub-id>
</mixed-citation>
</ref>
<ref id="B82">
<label>82</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yuan</surname> <given-names>H</given-names></name>
<name><surname>Huang</surname> <given-names>Q</given-names></name>
<name><surname>Wen</surname> <given-names>J</given-names></name>
<name><surname>Gao</surname> <given-names>Y</given-names></name>
</person-group>. 
<article-title>Ultrasound viscoelastic imaging in the noninvasive quantitative assessment of chronic kidney disease</article-title>. <source>Renal Failure</source>. (<year>2024</year>) <volume>46</volume>:<elocation-id>2407882</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/0886022X.2024.2407882</pub-id>, PMID: <pub-id pub-id-type="pmid">39344493</pub-id>
</mixed-citation>
</ref>
<ref id="B83">
<label>83</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Huang</surname> <given-names>X</given-names></name>
<name><surname>Wei</surname> <given-names>T</given-names></name>
<name><surname>Li</surname> <given-names>J</given-names></name>
<name><surname>Xu</surname> <given-names>L</given-names></name>
<name><surname>Tang</surname> <given-names>Y</given-names></name>
<name><surname>Liao</surname> <given-names>JT</given-names></name>
<etal/>
</person-group>. 
<article-title>Multimodal ultrasound for assessment of renal fibrosis in biopsy-proven patients with chronic kidney disease</article-title>. <source>Ultraschall in der Med</source>. (<year>2025</year>) <volume>31</volume>:<page-range>1&#x2013;10</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1055/a-2559-7743</pub-id>, PMID: <pub-id pub-id-type="pmid">40164113</pub-id>
</mixed-citation>
</ref>
<ref id="B84">
<label>84</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tang</surname> <given-names>Y</given-names></name>
<name><surname>Qin</surname> <given-names>W</given-names></name>
</person-group>. 
<article-title>Application of multimodal ultrasonography to predicting the acute kidney injury risk of patients with sepsis: artificial intelligence approach</article-title>. <source>PeerJ Comput Sci</source>. (<year>2024</year>) <volume>10</volume>:<fpage>e2157</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.7717/peerj-cs.2157</pub-id>, PMID: <pub-id pub-id-type="pmid">38983213</pub-id>
</mixed-citation>
</ref>
<ref id="B85">
<label>85</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Elshewey</surname> <given-names>AM</given-names></name>
<name><surname>Selem</surname> <given-names>E</given-names></name>
<name><surname>Abed</surname> <given-names>AH</given-names></name>
</person-group>. 
<article-title>Improved CKD classification based on explainable artificial intelligence with extra trees and BBFS</article-title>. <source>Sci Rep</source>. (<year>2025</year>) <volume>15</volume>:<fpage>17861</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-025-02355-7</pub-id>, PMID: <pub-id pub-id-type="pmid">40404758</pub-id>
</mixed-citation>
</ref>
<ref id="B86">
<label>86</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tian</surname> <given-names>S</given-names></name>
<name><surname>Yu</surname> <given-names>Y</given-names></name>
<name><surname>Shi</surname> <given-names>K</given-names></name>
<name><surname>Jiang</surname> <given-names>Y</given-names></name>
<name><surname>Song</surname> <given-names>H</given-names></name>
<name><surname>Wang</surname> <given-names>Y</given-names></name>
<etal/>
</person-group>. 
<article-title>Deep learning radiomics based on ultrasound images for the assisted diagnosis of chronic kidney disease</article-title>. <source>Nephrology</source>. (<year>2024</year>) <volume>29</volume>:<page-range>748&#x2013;57</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/nep.14376</pub-id>, PMID: <pub-id pub-id-type="pmid">39134509</pub-id>
</mixed-citation>
</ref>
<ref id="B87">
<label>87</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhu</surname> <given-names>M</given-names></name>
<name><surname>Ma</surname> <given-names>L</given-names></name>
<name><surname>Yang</surname> <given-names>W</given-names></name>
<name><surname>Tang</surname> <given-names>L</given-names></name>
<name><surname>Li</surname> <given-names>H</given-names></name>
<name><surname>Zheng</surname> <given-names>M</given-names></name>
<etal/>
</person-group>. 
<article-title>Elastography ultrasound with machine learning improves the diagnostic performance of traditional ultrasound in predicting kidney fibrosis</article-title>. <source>J Formosan Med Assoc</source>. (<year>2022</year>) <volume>121</volume>:<page-range>1062&#x2013;72</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jfma.2021.08.011</pub-id>, PMID: <pub-id pub-id-type="pmid">34452784</pub-id>
</mixed-citation>
</ref>
<ref id="B88">
<label>88</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kuo</surname> <given-names>CC</given-names></name>
<name><surname>Chang</surname> <given-names>CM</given-names></name>
<name><surname>Liu</surname> <given-names>KT</given-names></name>
<name><surname>Lin</surname> <given-names>WK</given-names></name>
<name><surname>Chiang</surname> <given-names>HY</given-names></name>
<name><surname>Chung</surname> <given-names>CW</given-names></name>
<etal/>
</person-group>. 
<article-title>Automation of the kidney function prediction and classification through ultrasound-based kidney imaging using deep learning</article-title>. <source>NPJ digital Med</source>. (<year>2019</year>) <volume>2</volume>:<fpage>29</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41746-019-0104-2</pub-id>, PMID: <pub-id pub-id-type="pmid">31304376</pub-id>
</mixed-citation>
</ref>
<ref id="B89">
<label>89</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Alnazer</surname> <given-names>I</given-names></name>
<name><surname>Bourdon</surname> <given-names>P</given-names></name>
<name><surname>Urruty</surname> <given-names>T</given-names></name>
<name><surname>Falou</surname> <given-names>O</given-names></name>
<name><surname>Khalil</surname> <given-names>M</given-names></name>
<name><surname>Shahin</surname> <given-names>A</given-names></name>
<etal/>
</person-group>. 
<article-title>Recent advances in medical image processing for the evaluation of chronic kidney disease</article-title>. <source>Med image Anal</source>. (<year>2021</year>) <volume>69</volume>:<elocation-id>101960</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.media.2021.101960</pub-id>, PMID: <pub-id pub-id-type="pmid">33517241</pub-id>
</mixed-citation>
</ref>
<ref id="B90">
<label>90</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lim</surname> <given-names>WTH</given-names></name>
<name><surname>Ooi</surname> <given-names>EH</given-names></name>
<name><surname>Foo</surname> <given-names>JJ</given-names></name>
<name><surname>Ng</surname> <given-names>KH</given-names></name>
<name><surname>Wong</surname> <given-names>JHD</given-names></name>
<name><surname>Leong</surname> <given-names>SS</given-names></name>
<etal/>
</person-group>. 
<article-title>Shear wave elastography: A review on the confounding factors and their potential mitigation in detecting chronic kidney disease</article-title>. <source>Ultrasound Med Biol</source>. (<year>2021</year>) <volume>47</volume>:<page-range>2033&#x2013;47</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ultrasmedbio.2021.03.030</pub-id>, PMID: <pub-id pub-id-type="pmid">33958257</pub-id>
</mixed-citation>
</ref>
<ref id="B91">
<label>91</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Qiang</surname> <given-names>B</given-names></name>
<name><surname>Xu</surname> <given-names>Q</given-names></name>
<name><surname>Pan</surname> <given-names>Y</given-names></name>
<name><surname>Wang</surname> <given-names>J</given-names></name>
<name><surname>Shen</surname> <given-names>C</given-names></name>
<name><surname>Peng</surname> <given-names>X</given-names></name>
<etal/>
</person-group>. 
<article-title>Shear wave elastography: A noninvasive approach for assessing acute kidney injury in critically ill patients</article-title>. <source>PloS One</source>. (<year>2024</year>) <volume>19</volume>:<fpage>e0296411</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0296411</pub-id>, PMID: <pub-id pub-id-type="pmid">38206919</pub-id>
</mixed-citation>
</ref>
<ref id="B92">
<label>92</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Miller</surname> <given-names>ZA</given-names></name>
<name><surname>Dwyer</surname> <given-names>K</given-names></name>
</person-group>. 
<article-title>Artificial intelligence to predict chronic kidney disease progression to kidney failure: A narrative review</article-title>. <source>Nephrology</source>. (<year>2025</year>) <volume>30</volume>:<fpage>e14424</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/nep.14424</pub-id>, PMID: <pub-id pub-id-type="pmid">39763163</pub-id>
</mixed-citation>
</ref>
<ref id="B93">
<label>93</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Puccinelli</surname> <given-names>C</given-names></name>
<name><surname>Pelligra</surname> <given-names>T</given-names></name>
<name><surname>Lippi</surname> <given-names>I</given-names></name>
<name><surname>Citi</surname> <given-names>S</given-names></name>
</person-group>. 
<article-title>Diagnostic utility of two-dimensional shear wave elastography in nephropathic dogs and its correlation with renal contrast-enhanced ultrasound in course of acute kidney injury</article-title>. <source>J veterinary Med Sci</source>. (<year>2023</year>) <volume>85</volume>:<page-range>1216&#x2013;25</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1292/jvms.23-0065</pub-id>, PMID: <pub-id pub-id-type="pmid">37793837</pub-id>
</mixed-citation>
</ref>
<ref id="B94">
<label>94</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zaky</surname> <given-names>A</given-names></name>
<name><surname>Beck</surname> <given-names>AW</given-names></name>
<name><surname>Bae</surname> <given-names>S</given-names></name>
<name><surname>Sturdivant</surname> <given-names>A</given-names></name>
<name><surname>Liwo</surname> <given-names>A</given-names></name>
<name><surname>Zdenek</surname> <given-names>N</given-names></name>
<etal/>
</person-group>. 
<article-title>The biosonographic index. A novel modality for early detection of acute kidney injury after complex vascular surgery. A protocol for an exploratory prospective study</article-title>. <source>PloS One</source>. (<year>2020</year>) <volume>15</volume>:<fpage>e0241782</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0241782</pub-id>, PMID: <pub-id pub-id-type="pmid">33201924</pub-id>
</mixed-citation>
</ref>
<ref id="B95">
<label>95</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Loftus</surname> <given-names>TJ</given-names></name>
<name><surname>Shickel</surname> <given-names>B</given-names></name>
<name><surname>Ozrazgat-Baslanti</surname> <given-names>T</given-names></name>
<name><surname>Ren</surname> <given-names>Y</given-names></name>
<name><surname>Glicksberg</surname> <given-names>BS</given-names></name>
<name><surname>Cao</surname> <given-names>J</given-names></name>
<etal/>
</person-group>. 
<article-title>Artificial intelligence-enabled decision support in nephrology</article-title>. <source>Nat Rev Nephrol</source>. (<year>2022</year>) <volume>18</volume>:<page-range>452&#x2013;65</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41581-022-00562-3</pub-id>, PMID: <pub-id pub-id-type="pmid">35459850</pub-id>
</mixed-citation>
</ref>
<ref id="B96">
<label>96</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ozcan</surname> <given-names>SGG</given-names></name>
<name><surname>Erkan</surname> <given-names>M</given-names></name>
</person-group>. 
<article-title>Reliability and quality of information provided by artificial intelligence chatbots on postcontrast acute kidney injury: an evaluation of diagnostic, preventive, and treatment guidance</article-title>. <source>Rev da Associacao Med Bras</source>. (<year>2024</year>) <volume>70</volume>:<fpage>e20240891</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1590/1806-9282.20240891</pub-id>, PMID: <pub-id pub-id-type="pmid">39630765</pub-id>
</mixed-citation>
</ref>
<ref id="B97">
<label>97</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Samal</surname> <given-names>L</given-names></name>
<name><surname>Kilgallon</surname> <given-names>JL</given-names></name>
<name><surname>Lipsitz</surname> <given-names>S</given-names></name>
<name><surname>Baer</surname> <given-names>HJ</given-names></name>
<name><surname>McCoy</surname> <given-names>A</given-names></name>
<name><surname>Gannon</surname> <given-names>M</given-names></name>
<etal/>
</person-group>. 
<article-title>Clinical decision support for hypertension management in chronic kidney disease: A randomized clinical trial</article-title>. <source>JAMA Internal Med</source>. (<year>2024</year>) <volume>184</volume>:<page-range>484&#x2013;92</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1001/jamainternmed.2023.8315</pub-id>, PMID: <pub-id pub-id-type="pmid">38466302</pub-id>
</mixed-citation>
</ref>
<ref id="B98">
<label>98</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Sudharson</surname> <given-names>S</given-names></name>
<name><surname>Kokil</surname> <given-names>P</given-names></name>
</person-group>. 
<article-title>An ensemble of deep neural networks for kidney ultrasound image classification</article-title>. <source>Comput Methods programs biomedicine</source>. (<year>2020</year>) <volume>197</volume>:<fpage>105709</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cmpb.2020.105709</pub-id>, PMID: <pub-id pub-id-type="pmid">32889406</pub-id>
</mixed-citation>
</ref>
<ref id="B99">
<label>99</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Weaver</surname> <given-names>JK</given-names></name>
<name><surname>Logan</surname> <given-names>J</given-names></name>
<name><surname>Broms</surname> <given-names>R</given-names></name>
<name><surname>Antony</surname> <given-names>M</given-names></name>
<name><surname>Rickard</surname> <given-names>M</given-names></name>
<name><surname>Erdman</surname> <given-names>L</given-names></name>
<etal/>
</person-group>. 
<article-title>Deep learning of renal scans in children with antenatal hydronephrosis</article-title>. <source>J Pediatr Urol</source>. (<year>2023</year>) <volume>19</volume>:<fpage>514</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jpurol.2022.12.017</pub-id>, PMID: <pub-id pub-id-type="pmid">36775719</pub-id>
</mixed-citation>
</ref>
<ref id="B100">
<label>100</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jacq</surname> <given-names>A</given-names></name>
<name><surname>Tarris</surname> <given-names>G</given-names></name>
<name><surname>Jaugey</surname> <given-names>A</given-names></name>
<name><surname>Paindavoine</surname> <given-names>M</given-names></name>
<name><surname>Mar&#xe9;chal</surname> <given-names>E</given-names></name>
<name><surname>Bard</surname> <given-names>P</given-names></name>
<etal/>
</person-group>. 
<article-title>Automated evaluation with deep learning of total interstitial inflammation and peritubular capillaritis on kidney biopsies</article-title>. <source>Nephrology dialysis Transplant</source>. (<year>2023</year>) <volume>38</volume>:<page-range>2786&#x2013;98</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/ndt/gfad094</pub-id>, PMID: <pub-id pub-id-type="pmid">37197910</pub-id>
</mixed-citation>
</ref>
<ref id="B101">
<label>101</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>David</surname> <given-names>N</given-names></name>
<name><surname>Horrow</surname> <given-names>MM</given-names></name>
</person-group>. 
<article-title>Pitfalls in renal ultrasound</article-title>. <source>Ultrasound Q</source>. (<year>2020</year>) <volume>36</volume>:<page-range>300&#x2013;13</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1097/RUQ.0000000000000519</pub-id>, PMID: <pub-id pub-id-type="pmid">33298769</pub-id>
</mixed-citation>
</ref>
<ref id="B102">
<label>102</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shehata</surname> <given-names>M</given-names></name>
<name><surname>Abouelkheir</surname> <given-names>RT</given-names></name>
<name><surname>Gayhart</surname> <given-names>M</given-names></name>
<name><surname>Van Bogaert</surname> <given-names>E</given-names></name>
<name><surname>Abou El-Ghar</surname> <given-names>M</given-names></name>
<name><surname>Dwyer</surname> <given-names>AC</given-names></name>
<etal/>
</person-group>. 
<article-title>Role of AI and radiomic markers in early diagnosis of renal cancer and clinical outcome prediction: A brief review</article-title>. <source>Cancers</source>. (<year>2023</year>) <volume>15</volume>:<elocation-id>2835</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/cancers15102835</pub-id>, PMID: <pub-id pub-id-type="pmid">37345172</pub-id>
</mixed-citation>
</ref>
<ref id="B103">
<label>103</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Sheikhy</surname> <given-names>A</given-names></name>
<name><surname>Dehghani Firouzabadi</surname> <given-names>F</given-names></name>
<name><surname>Lay</surname> <given-names>N</given-names></name>
<name><surname>Chaudhri</surname> <given-names>S</given-names></name>
<name><surname>Chandarana</surname> <given-names>H</given-names></name>
<name><surname>Bagga</surname> <given-names>B</given-names></name>
</person-group>. 
<article-title>State of the art review of AI in renal imaging</article-title>. <source>Abdominal Radiol</source>. (<year>2025</year>) <volume>50</volume>:<page-range>5305&#x2013;23</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00261-025-04963-3</pub-id>, PMID: <pub-id pub-id-type="pmid">40293518</pub-id>
</mixed-citation>
</ref>
<ref id="B104">
<label>104</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yin</surname> <given-names>S</given-names></name>
<name><surname>Peng</surname> <given-names>Q</given-names></name>
<name><surname>Li</surname> <given-names>H</given-names></name>
<name><surname>Zhang</surname> <given-names>Z</given-names></name>
<name><surname>You</surname> <given-names>X</given-names></name>
<name><surname>Fischer</surname> <given-names>K</given-names></name>
<etal/>
</person-group>. 
<article-title>Multi-instance deep learning of ultrasound imaging data for pattern classification of congenital abnormalities of the kidney and urinary tract in children</article-title>. <source>Urology</source>. (<year>2020</year>) <volume>142</volume>:<page-range>183&#x2013;9</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.urology.2020.05.019</pub-id>, PMID: <pub-id pub-id-type="pmid">32445770</pub-id>
</mixed-citation>
</ref>
<ref id="B105">
<label>105</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hai</surname> <given-names>J</given-names></name>
<name><surname>Qiao</surname> <given-names>K</given-names></name>
<name><surname>Chen</surname> <given-names>J</given-names></name>
<name><surname>Liang</surname> <given-names>N</given-names></name>
<name><surname>Zhang</surname> <given-names>L</given-names></name>
<name><surname>Yan</surname> <given-names>B</given-names></name>
<etal/>
</person-group>. 
<article-title>Multiview features integrated 2D\3D Net for glomerulopathy histologic types classification using ultrasound images</article-title>. <source>Comput Methods Programs Biomedicine</source>. (<year>2021</year>) <volume>212</volume>:<elocation-id>106439</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cmpb.2021.106439</pub-id>, PMID: <pub-id pub-id-type="pmid">34695734</pub-id>
</mixed-citation>
</ref>
<ref id="B106">
<label>106</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tsai</surname> <given-names>MC</given-names></name>
<name><surname>Lu</surname> <given-names>HHS</given-names></name>
<name><surname>Chang</surname> <given-names>YC</given-names></name>
<name><surname>Huang</surname> <given-names>YC</given-names></name>
<name><surname>Fu</surname> <given-names>LS</given-names></name>
</person-group>. 
<article-title>Automatic screening of pediatric renal ultrasound abnormalities: deep learning and transfer learning approach</article-title>. <source>JMIR Med Inf</source>. (<year>2022</year>) <volume>10</volume>:<fpage>e40878</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2196/40878</pub-id>, PMID: <pub-id pub-id-type="pmid">36322109</pub-id>
</mixed-citation>
</ref>
<ref id="B107">
<label>107</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lanza</surname> <given-names>C</given-names></name>
<name><surname>Carriero</surname> <given-names>S</given-names></name>
<name><surname>Biondetti</surname> <given-names>P</given-names></name>
<name><surname>Angileri</surname> <given-names>SA</given-names></name>
<name><surname>Carrafiello</surname> <given-names>G</given-names></name>
<name><surname>Ierardi</surname> <given-names>AM</given-names></name>
<etal/>
</person-group>. 
<article-title>Advances in imaging guidance during percutaneous ablation of renal tumors</article-title>. <source>Semin ultrasound CT MR</source>. (<year>2023</year>) <volume>44</volume>:<page-range>162&#x2013;9</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1053/j.sult.2023.03.003</pub-id>, PMID: <pub-id pub-id-type="pmid">37245882</pub-id>
</mixed-citation>
</ref>
<ref id="B108">
<label>108</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Serhal</surname> <given-names>M</given-names></name>
<name><surname>Rangwani</surname> <given-names>S</given-names></name>
<name><surname>Seedial</surname> <given-names>SM</given-names></name>
<name><surname>Thornburg</surname> <given-names>B</given-names></name>
<name><surname>Riaz</surname> <given-names>A</given-names></name>
<name><surname>Nemcek</surname> <given-names>AA</given-names></name>
<etal/>
</person-group>. 
<article-title>Safety and diagnostic efficacy of image-guided biopsy of small renal masses</article-title>. <source>Cancers</source>. (<year>2024</year>) <volume>16</volume>:<elocation-id>835</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/cancers16040835</pub-id>, PMID: <pub-id pub-id-type="pmid">38398226</pub-id>
</mixed-citation>
</ref>
<ref id="B109">
<label>109</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Sharma</surname> <given-names>NK</given-names></name>
<name><surname>Sarode</surname> <given-names>SC</given-names></name>
</person-group>. 
<article-title>Evolving Artificial Intelligence (AI) at the Crossroads: Potentiating Productive vs. Declining Disruptive Cancer Research</article-title>. <source>Cancers</source>. (<year>2024</year>) <volume>16</volume>:<elocation-id>3646</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/cancers16213646</pub-id>, PMID: <pub-id pub-id-type="pmid">39518084</pub-id>
</mixed-citation>
</ref>
<ref id="B110">
<label>110</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Aljabri</surname> <given-names>M</given-names></name>
<name><surname>AlAmir</surname> <given-names>M</given-names></name>
<name><surname>AlGhamdi</surname> <given-names>M</given-names></name>
<name><surname>Abdel-Mottaleb</surname> <given-names>M</given-names></name>
<name><surname>Collado-Mesa</surname> <given-names>F</given-names></name>
</person-group>. 
<article-title>Toward a better understanding of annotation tools for medical imaging: a survey</article-title>. <source>Multimedia Tools Appl</source>. (<year>2022</year>) <volume>81</volume>:<page-range>25877&#x2013;911</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11042-022-12100-1</pub-id>, PMID: <pub-id pub-id-type="pmid">35350630</pub-id>
</mixed-citation>
</ref>
<ref id="B111">
<label>111</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zheng</surname> <given-names>F</given-names></name>
<name><surname>XingMing</surname> <given-names>L</given-names></name>
<name><surname>JuYing</surname> <given-names>X</given-names></name>
<name><surname>MengYing</surname> <given-names>T</given-names></name>
<name><surname>BaoJian</surname> <given-names>Y</given-names></name>
<name><surname>Yan</surname> <given-names>S</given-names></name>
<etal/>
</person-group>. 
<article-title>Significant reduction in manual annotation costs in ultrasound medical image database construction through step by step artificial intelligence preannotation</article-title>. <source>PloS digital Health</source>. (<year>2025</year>) <volume>4</volume>:<fpage>e0000738</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pdig.0000738</pub-id>, PMID: <pub-id pub-id-type="pmid">40587506</pub-id>
</mixed-citation>
</ref>
<ref id="B112">
<label>112</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wu</surname> <given-names>L</given-names></name>
<name><surname>Xia</surname> <given-names>D</given-names></name>
<name><surname>Wang</surname> <given-names>J</given-names></name>
<name><surname>Chen</surname> <given-names>S</given-names></name>
<name><surname>Cui</surname> <given-names>X</given-names></name>
<name><surname>Shen</surname> <given-names>L</given-names></name>
<etal/>
</person-group>. 
<article-title>Deep learning detection and segmentation of facet joints in ultrasound images based on convolutional neural networks and enhanced data annotation</article-title>. <source>Diagnostics</source>. (<year>2024</year>) <volume>14</volume>:<elocation-id>755</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/diagnostics14070755</pub-id>, PMID: <pub-id pub-id-type="pmid">38611668</pub-id>
</mixed-citation>
</ref>
<ref id="B113">
<label>113</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>Y</given-names></name>
<name><surname>Cheungpasitporn</surname> <given-names>W</given-names></name>
<name><surname>Ali</surname> <given-names>H</given-names></name>
<name><surname>Qing</surname> <given-names>J</given-names></name>
<name><surname>Thongprayoon</surname> <given-names>C</given-names></name>
<name><surname>Kaewput</surname> <given-names>W</given-names></name>
<etal/>
</person-group>. 
<article-title>A practical guide for nephrologist peer reviewers: evaluating artificial intelligence and machine learning research in nephrology</article-title>. <source>Renal failure</source>. (<year>2025</year>) <volume>47</volume>:<elocation-id>2513002</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/0886022X.2025.2513002</pub-id>, PMID: <pub-id pub-id-type="pmid">40620096</pub-id>
</mixed-citation>
</ref>
<ref id="B114">
<label>114</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lee</surname> <given-names>S</given-names></name>
<name><surname>Kang</surname> <given-names>M</given-names></name>
<name><surname>Byeon</surname> <given-names>K</given-names></name>
<name><surname>Lee</surname> <given-names>SE</given-names></name>
<name><surname>Lee</surname> <given-names>IH</given-names></name>
<name><surname>Kim</surname> <given-names>YA</given-names></name>
<etal/>
</person-group>. 
<article-title>Machine learning-aided chronic kidney disease diagnosis based on ultrasound imaging integrated with computer-extracted measurable features</article-title>. <source>J digital Imaging</source>. (<year>2022</year>) <volume>35</volume>:<page-range>1091&#x2013;100</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s10278-022-00625-8</pub-id>, PMID: <pub-id pub-id-type="pmid">35411524</pub-id>
</mixed-citation>
</ref>
<ref id="B115">
<label>115</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Kumar</surname> <given-names>SS</given-names></name>
<name><surname>Khandekar</surname> <given-names>N</given-names></name>
<name><surname>Dani</surname> <given-names>K</given-names></name>
<name><surname>Bhatt</surname> <given-names>SR</given-names></name>
<name><surname>Duddalwar</surname> <given-names>V</given-names></name>
<name><surname>D'Souza</surname> <given-names>A</given-names></name>
<etal/>
</person-group>. 
<article-title>A scoping review of population diversity in the common genomic aberrations of clear cell renal cell carcinoma</article-title>. <source>Oncology</source>. (<year>2025</year>) <volume>103</volume>:<page-range>341&#x2013;50</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1159/000541370</pub-id>, PMID: <pub-id pub-id-type="pmid">39250899</pub-id>
</mixed-citation>
</ref>
<ref id="B116">
<label>116</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Alexa</surname> <given-names>R</given-names></name>
<name><surname>Kranz</surname> <given-names>J</given-names></name>
<name><surname>Kramann</surname> <given-names>R</given-names></name>
<name><surname>Kuppe</surname> <given-names>C</given-names></name>
<name><surname>Sanyal</surname> <given-names>R</given-names></name>
<name><surname>Hayat</surname> <given-names>S</given-names></name>
<etal/>
</person-group>. 
<article-title>Harnessing artificial intelligence for enhanced renal analysis: automated detection of hydronephrosis and precise kidney segmentation</article-title>. <source>Eur Urol Open Sci</source>. (<year>2024</year>) <volume>62</volume>:<fpage>19</fpage>&#x2013;<lpage>25</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.euros.2024.01.017</pub-id>, PMID: <pub-id pub-id-type="pmid">38585207</pub-id>
</mixed-citation>
</ref>
<ref id="B117">
<label>117</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>J</given-names></name>
<name><surname>Wang</surname> <given-names>K</given-names></name>
<name><surname>Yu</surname> <given-names>Y</given-names></name>
<name><surname>Lu</surname> <given-names>Y</given-names></name>
<name><surname>Xiao</surname> <given-names>W</given-names></name>
<name><surname>Sun</surname> <given-names>Z</given-names></name>
<etal/>
</person-group>. 
<article-title>Self-improving generative foundation model for synthetic medical image generation and clinical applications</article-title>. <source>Nat Med</source>. (<year>2025</year>) <volume>31</volume>:<page-range>609&#x2013;17</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41591-024-03359-y</pub-id>, PMID: <pub-id pub-id-type="pmid">39663467</pub-id>
</mixed-citation>
</ref>
<ref id="B118">
<label>118</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Valerio</surname> <given-names>AG</given-names></name>
<name><surname>Trufanova</surname> <given-names>K</given-names></name>
<name><surname>de Benedictis</surname> <given-names>S</given-names></name>
<name><surname>Vessio</surname> <given-names>G</given-names></name>
<name><surname>Castellano</surname> <given-names>G</given-names></name>
</person-group>. 
<article-title>From segmentation to explanation: Generating textual reports from MRI with LLMs</article-title>. <source>Comput Methods programs biomedicine</source>. (<year>2025</year>) <volume>270</volume>:<elocation-id>108922</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.cmpb.2025.108922</pub-id>, PMID: <pub-id pub-id-type="pmid">40633400</pub-id>
</mixed-citation>
</ref>
<ref id="B119">
<label>119</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yao</surname> <given-names>J</given-names></name>
<name><surname>Wang</surname> <given-names>Y</given-names></name>
<name><surname>Lei</surname> <given-names>Z</given-names></name>
<name><surname>Wang</surname> <given-names>K</given-names></name>
<name><surname>Feng</surname> <given-names>N</given-names></name>
<name><surname>Dong</surname> <given-names>F</given-names></name>
<etal/>
</person-group>. 
<article-title>Multimodal GPT model for assisting thyroid nodule diagnosis and management</article-title>. <source>NPJ digital Med</source>. (<year>2025</year>) <volume>8</volume>:<fpage>245</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41746-025-01652-9</pub-id>, PMID: <pub-id pub-id-type="pmid">40319170</pub-id>
</mixed-citation>
</ref>
<ref id="B120">
<label>120</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Alderden</surname> <given-names>J</given-names></name>
<name><surname>Johnny</surname> <given-names>J</given-names></name>
<name><surname>Brooks</surname> <given-names>KR</given-names></name>
<name><surname>Wilson</surname> <given-names>A</given-names></name>
<name><surname>Yap</surname> <given-names>TL</given-names></name>
<name><surname>Zhao</surname> <given-names>YL</given-names></name>
<etal/>
</person-group>. 
<article-title>Explainable artificial intelligence for early prediction of pressure injury risk</article-title>. <source>Am J Crit Care</source>. (<year>2024</year>) <volume>33</volume>:<page-range>373&#x2013;81</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.4037/ajcc2024856</pub-id>, PMID: <pub-id pub-id-type="pmid">39217110</pub-id>
</mixed-citation>
</ref>
<ref id="B121">
<label>121</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ullah</surname> <given-names>N</given-names></name>
<name><surname>Guzman-Aroca</surname> <given-names>F</given-names></name>
<name><surname>Martinez-Alvarez</surname> <given-names>F</given-names></name>
<name><surname>De Falco</surname> <given-names>I</given-names></name>
<name><surname>Sannino</surname> <given-names>G</given-names></name>
</person-group>. 
<article-title>A novel explainable AI framework for medical image classification integrating statistical, visual, and rule-based methods</article-title>. <source>Med image Anal</source>. (<year>2025</year>) <volume>105</volume>:<elocation-id>103665</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.media.2025.103665</pub-id>, PMID: <pub-id pub-id-type="pmid">40505210</pub-id>
</mixed-citation>
</ref>
<ref id="B122">
<label>122</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Dixit</surname> <given-names>S</given-names></name>
<name><surname>Sharma</surname> <given-names>D</given-names></name>
<name><surname>Sharma</surname> <given-names>N</given-names></name>
<name><surname>Shukla</surname> <given-names>VK</given-names></name>
</person-group>. 
<article-title>A review of software in clinical trials: FDA regulatory frameworks and addressing challenges</article-title>. <source>Rev Recent Clin trials</source>. (<year>2025</year>) <volume>29</volume>:<page-range>1&#x2013;7</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.2174/0115748871359356250523033831</pub-id>, PMID: <pub-id pub-id-type="pmid">40454504</pub-id>
</mixed-citation>
</ref>
<ref id="B123">
<label>123</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hassan</surname> <given-names>SU</given-names></name>
<name><surname>Abdulkadir</surname> <given-names>SJ</given-names></name>
<name><surname>Zahid</surname> <given-names>MSM</given-names></name>
<name><surname>Al-Selwi</surname> <given-names>SM</given-names></name>
</person-group>. 
<article-title>Local interpretable model-agnostic explanation approach for medical imaging analysis: A systematic literature review</article-title>. <source>Comput Biol Med</source>. (<year>2025</year>) <volume>185</volume>:<elocation-id>109569</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compbiomed.2024.109569</pub-id>, PMID: <pub-id pub-id-type="pmid">39705792</pub-id>
</mixed-citation>
</ref>
<ref id="B124">
<label>124</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Brandenburg</surname> <given-names>JM</given-names></name>
<name><surname>M&#xfc;ller-Stich</surname> <given-names>BP</given-names></name>
<name><surname>Wagner</surname> <given-names>M</given-names></name>
<name><surname>Schaar</surname> <given-names>M</given-names></name>
</person-group>. 
<article-title>Can surgeons trust AI? Perspectives on machine learning in surgery and the importance of eXplainable Artificial Intelligence (XAI)</article-title>. <source>Langenbeck&#x2019;s Arch Surg</source>. (<year>2025</year>) <volume>410</volume>:<fpage>53</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00423-025-03626-7</pub-id>, PMID: <pub-id pub-id-type="pmid">39873858</pub-id>
</mixed-citation>
</ref>
<ref id="B125">
<label>125</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Drukker</surname> <given-names>L</given-names></name>
<name><surname>Noble</surname> <given-names>JA</given-names></name>
<name><surname>Papageorghiou</surname> <given-names>AT</given-names></name>
</person-group>. 
<article-title>Introduction to artificial intelligence in ultrasound imaging in obstetrics and gynecology</article-title>. <source>Ultrasound obstetrics gynecology</source>. (<year>2020</year>) <volume>56</volume>:<fpage>498</fpage>&#x2013;<lpage>505</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/uog.22122</pub-id>, PMID: <pub-id pub-id-type="pmid">32530098</pub-id>
</mixed-citation>
</ref>
<ref id="B126">
<label>126</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xia</surname> <given-names>Q</given-names></name>
<name><surname>Du</surname> <given-names>M</given-names></name>
<name><surname>Li</surname> <given-names>B</given-names></name>
<name><surname>Hou</surname> <given-names>L</given-names></name>
<name><surname>Chen</surname> <given-names>Z</given-names></name>
</person-group>. 
<article-title>Interdisciplinary collaboration opportunities, challenges, and solutions for artificial intelligence in ultrasound</article-title>. <source>Curr Med Imaging</source>. (<year>2022</year>) <volume>18</volume>:<page-range>1046&#x2013;51</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.2174/1573405618666220321123126</pub-id>, PMID: <pub-id pub-id-type="pmid">35319383</pub-id>
</mixed-citation>
</ref>
<ref id="B127">
<label>127</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ostrowski</surname> <given-names>DA</given-names></name>
<name><surname>Logan</surname> <given-names>JR</given-names></name>
<name><surname>Antony</surname> <given-names>M</given-names></name>
<name><surname>Broms</surname> <given-names>R</given-names></name>
<name><surname>Weiss</surname> <given-names>DA</given-names></name>
<name><surname>Van Batavia</surname> <given-names>J</given-names></name>
<etal/>
</person-group>. 
<article-title>Automated Society of Fetal Urology (SFU) grading of hydronephrosis on ultrasound imaging using a convolutional neural network</article-title>. <source>J Pediatr Urol</source>. (<year>2023</year>) <volume>19</volume>:<fpage>566</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jpurol.2023.05.014</pub-id>, PMID: <pub-id pub-id-type="pmid">37286464</pub-id>
</mixed-citation>
</ref>
<ref id="B128">
<label>128</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liang</surname> <given-names>X</given-names></name>
<name><surname>Du</surname> <given-names>M</given-names></name>
<name><surname>Chen</surname> <given-names>Z</given-names></name>
</person-group>. 
<article-title>Artificial intelligence-aided ultrasound in renal diseases: a systematic review</article-title>. <source>Quantitative Imaging Med Surg</source>. (<year>2023</year>) <volume>13</volume>:<fpage>3988</fpage>&#x2013;<lpage>4001</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.21037/qims-22-1428</pub-id>, PMID: <pub-id pub-id-type="pmid">37284081</pub-id>
</mixed-citation>
</ref>
<ref id="B129">
<label>129</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>&#x160;tev&#xed;k</surname> <given-names>M</given-names></name>
<name><surname>Mal&#xed;k</surname> <given-names>M</given-names></name>
<name><surname>Vete&#x161;kov&#xe1;</surname> <given-names>&#x160;</given-names></name>
<name><surname>Trabalkov&#xe1;</surname> <given-names>Z</given-names></name>
<name><surname>Hlibok&#xfd;</surname> <given-names>M</given-names></name>
<name><surname>Kol&#xe1;rik</surname> <given-names>M</given-names></name>
<etal/>
</person-group>. 
<article-title>Hybrid artificial intelligence solution combining convolutional neural network and analytical approach showed higher accuracy in A-lines detection on lung ultrasound in thoracic surgery patients compared with radiology resident</article-title>. <source>Neuro Endocrinol Lett</source>. (<year>2024</year>) <volume>45</volume>:<page-range>229&#x2013;37</page-range>. PMID: <pub-id pub-id-type="pmid">39146568</pub-id>
</mixed-citation>
</ref>
<ref id="B130">
<label>130</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bai</surname> <given-names>L</given-names></name>
<name><surname>Liu</surname> <given-names>M</given-names></name>
<name><surname>Sun</surname> <given-names>Y</given-names></name>
</person-group>. 
<article-title>Overview of food preservation and traceability technology in the smart cold chain system</article-title>. <source>Foods</source>. (<year>2023</year>) <volume>12</volume>:<elocation-id>2881</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/foods12152881</pub-id>, PMID: <pub-id pub-id-type="pmid">37569150</pub-id>
</mixed-citation>
</ref>
<ref id="B131">
<label>131</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shaikh</surname> <given-names>F</given-names></name>
<name><surname>Kenny</surname> <given-names>JE</given-names></name>
<name><surname>Awan</surname> <given-names>O</given-names></name>
<name><surname>Markovic</surname> <given-names>D</given-names></name>
<name><surname>Friedman</surname> <given-names>O</given-names></name>
<name><surname>He</surname> <given-names>T</given-names></name>
<etal/>
</person-group>. 
<article-title>Measuring the accuracy of cardiac output using POCUS: the introduction of artificial intelligence into routine care</article-title>. <source>ultrasound J</source>. (<year>2022</year>) <volume>14</volume>:<fpage>47</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/s13089-022-00301-6</pub-id>, PMID: <pub-id pub-id-type="pmid">36517635</pub-id>
</mixed-citation>
</ref>
<ref id="B132">
<label>132</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Thomas</surname> <given-names>J</given-names></name>
<name><surname>Ledger</surname> <given-names>GA</given-names></name>
<name><surname>Mamillapalli</surname> <given-names>CK</given-names></name>
</person-group>. 
<article-title>Use of artificial intelligence and machine learning for estimating Malignancy risk of thyroid nodules</article-title>. <source>Curr Opin endocrinology diabetes Obes</source>. (<year>2020</year>) <volume>27</volume>:<page-range>345&#x2013;50</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1097/MED.0000000000000557</pub-id>, PMID: <pub-id pub-id-type="pmid">32740044</pub-id>
</mixed-citation>
</ref>
<ref id="B133">
<label>133</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xu</surname> <given-names>C</given-names></name>
<name><surname>Wang</surname> <given-names>Z</given-names></name>
<name><surname>Zhou</surname> <given-names>J</given-names></name>
<name><surname>Hu</surname> <given-names>F</given-names></name>
<name><surname>Wang</surname> <given-names>Y</given-names></name>
<name><surname>Xu</surname> <given-names>Z</given-names></name>
<etal/>
</person-group>. 
<article-title>Application research of artificial intelligence software in the analysis of thyroid nodule ultrasound image characteristics</article-title>. <source>PloS One</source>. (<year>2025</year>) <volume>20</volume>:<fpage>e0323343</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1371/journal.pone.0323343</pub-id>, PMID: <pub-id pub-id-type="pmid">40455930</pub-id>
</mixed-citation>
</ref>
<ref id="B134">
<label>134</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Muralidharan</surname> <given-names>V</given-names></name>
<name><surname>Adewale</surname> <given-names>BA</given-names></name>
<name><surname>Huang</surname> <given-names>CJ</given-names></name>
<name><surname>Nta</surname> <given-names>MT</given-names></name>
<name><surname>Ademiju</surname> <given-names>PO</given-names></name>
<name><surname>Pathmarajah</surname> <given-names>P</given-names></name>
<etal/>
</person-group>. 
<article-title>A scoping review of reporting gaps in FDA-approved AI medical devices</article-title>. <source>NPJ digital Med</source>. (<year>2024</year>) <volume>7</volume>:<fpage>273</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41746-024-01270-x</pub-id>, PMID: <pub-id pub-id-type="pmid">39362934</pub-id>
</mixed-citation>
</ref>
<ref id="B135">
<label>135</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tepe</surname> <given-names>M</given-names></name>
<name><surname>Emekli</surname> <given-names>E</given-names></name>
</person-group>. 
<article-title>Assessing the responses of large language models (ChatGPT-4, gemini, and microsoft copilot) to frequently asked questions in breast imaging: A study on readability and accuracy</article-title>. <source>Cureus</source>. (<year>2024</year>) <volume>16</volume>:<fpage>e59960</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.7759/cureus.59960</pub-id>, PMID: <pub-id pub-id-type="pmid">38726360</pub-id>
</mixed-citation>
</ref>
<ref id="B136">
<label>136</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Cacciamani</surname> <given-names>GE</given-names></name>
<name><surname>Chen</surname> <given-names>A</given-names></name>
<name><surname>Gill</surname> <given-names>IS</given-names></name>
<name><surname>Hung</surname> <given-names>AJ</given-names></name>
</person-group>. 
<article-title>Artificial intelligence and urology: ethical considerations for urologists and patients</article-title>. <source>Nat Rev Urol</source>. (<year>2024</year>) <volume>21</volume>:<page-range>50&#x2013;9</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41585-023-00796-1</pub-id>, PMID: <pub-id pub-id-type="pmid">37524914</pub-id>
</mixed-citation>
</ref>
<ref id="B137">
<label>137</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jiang</surname> <given-names>L</given-names></name>
<name><surname>Wu</surname> <given-names>Z</given-names></name>
<name><surname>Xu</surname> <given-names>X</given-names></name>
<name><surname>Zhan</surname> <given-names>Y</given-names></name>
<name><surname>Jin</surname> <given-names>X</given-names></name>
<name><surname>Wang</surname> <given-names>L</given-names></name>
<etal/>
</person-group>. 
<article-title>Opportunities and challenges of artificial intelligence in the medical field: current application, emerging problems, and problem-solving strategies</article-title>. <source>J Int Med Res</source>. (<year>2021</year>) <volume>49</volume>:<elocation-id>3000605211000157</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1177/03000605211000157</pub-id>, PMID: <pub-id pub-id-type="pmid">33771068</pub-id>
</mixed-citation>
</ref>
<ref id="B138">
<label>138</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Saber</surname> <given-names>A</given-names></name>
<name><surname>Hassan</surname> <given-names>E</given-names></name>
<name><surname>Elbedwehy</surname> <given-names>S</given-names></name>
<name><surname>Awad</surname> <given-names>WA</given-names></name>
<name><surname>Emara</surname> <given-names>TZ</given-names></name>
</person-group>. 
<article-title>Leveraging ensemble convolutional neural networks and metaheuristic strategies for advanced kidney disease screening and classification</article-title>. <source>Sci Rep</source>. (<year>2025</year>) <volume>15</volume>:<fpage>2487</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-025-93950-1</pub-id>, PMID: <pub-id pub-id-type="pmid">40216822</pub-id>
</mixed-citation>
</ref>
<ref id="B139">
<label>139</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhao</surname> <given-names>D</given-names></name>
<name><surname>Wang</surname> <given-names>W</given-names></name>
<name><surname>Tang</surname> <given-names>T</given-names></name>
<name><surname>Zhang</surname> <given-names>YY</given-names></name>
<name><surname>Yu</surname> <given-names>C</given-names></name>
</person-group>. 
<article-title>Current progress in artificial intelligence-assisted medical image analysis for chronic kidney disease: A literature review</article-title>. <source>Comput Struct Biotechnol J</source>. (<year>2023</year>) <volume>21</volume>:<page-range>3315&#x2013;326</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.csbj.2023.05.029</pub-id>, PMID: <pub-id pub-id-type="pmid">37333860</pub-id>
</mixed-citation>
</ref>
<ref id="B140">
<label>140</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Amin</surname> <given-names>MS</given-names></name>
<name><surname>Ahmad</surname> <given-names>S</given-names></name>
<name><surname>Loh</surname> <given-names>WK</given-names></name>
</person-group>. 
<article-title>Federated learning for Healthcare 5.0: a comprehensive survey, taxonomy, challenges, and solutions</article-title>. <source>Soft Computing</source>. (<year>2025</year>) <volume>29</volume>:<fpage>673</fpage>&#x2013;<lpage>700</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00500-025-10508-z</pub-id>
</mixed-citation>
</ref>
<ref id="B141">
<label>141</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Almogadwy</surname> <given-names>B</given-names></name>
<name><surname>Alqarafi</surname> <given-names>A</given-names></name>
</person-group>. 
<article-title>Fused federated learning framework for secure and decentralized patient monitoring in healthcare 5.0 using IoMT</article-title>. <source>Sci Rep</source>. (<year>2025</year>) <volume>15</volume>:<fpage>24263</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-025-06574-w</pub-id>, PMID: <pub-id pub-id-type="pmid">40624105</pub-id>
</mixed-citation>
</ref>
<ref id="B142">
<label>142</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Durant</surname> <given-names>AM</given-names></name>
<name><surname>Medero</surname> <given-names>RC</given-names></name>
<name><surname>Briggs</surname> <given-names>LG</given-names></name>
<name><surname>Choudry</surname> <given-names>MM</given-names></name>
<name><surname>Nguyen</surname> <given-names>M</given-names></name>
<name><surname>Channar</surname> <given-names>A</given-names></name>
<etal/>
</person-group>. 
<article-title>The current application and future potential of artificial intelligence in renal cancer</article-title>. <source>Urology</source>. (<year>2024</year>) <volume>193</volume>:<page-range>157&#x2013;63</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.urology.2024.07.010</pub-id>, PMID: <pub-id pub-id-type="pmid">39029807</pub-id>
</mixed-citation>
</ref>
<ref id="B143">
<label>143</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shiraga</surname> <given-names>T</given-names></name>
<name><surname>Makimoto</surname> <given-names>H</given-names></name>
<name><surname>Kohlmann</surname> <given-names>B</given-names></name>
<name><surname>Magnisali</surname> <given-names>CE</given-names></name>
<name><surname>Imai</surname> <given-names>Y</given-names></name>
<name><surname>Itani</surname> <given-names>Y</given-names></name>
<etal/>
</person-group>. 
<article-title>Improving valvular pathologies and ventricular dysfunction diagnostic efficiency using combined auscultation and electrocardiography data: A multimodal AI approach</article-title>. <source>Sensors</source>. (<year>2023</year>) <volume>23</volume>:<elocation-id>9834</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s23249834</pub-id>, PMID: <pub-id pub-id-type="pmid">38139680</pub-id>
</mixed-citation>
</ref>
<ref id="B144">
<label>144</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Han</surname> <given-names>Z</given-names></name>
<name><surname>Huang</surname> <given-names>Y</given-names></name>
<name><surname>Wang</surname> <given-names>H</given-names></name>
<name><surname>Chu</surname> <given-names>Z</given-names></name>
</person-group>. 
<article-title>Multimodal ultrasound imaging: A method to improve the accuracy of diagnosing thyroid TI-RADS 4 nodules</article-title>. <source>J Clin ultrasound</source>. (<year>2022</year>) <volume>50</volume>:<page-range>1345&#x2013;52</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/jcu.23352</pub-id>, PMID: <pub-id pub-id-type="pmid">36169185</pub-id>
</mixed-citation>
</ref>
<ref id="B145">
<label>145</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jiang</surname> <given-names>J</given-names></name>
<name><surname>Chan</surname> <given-names>L</given-names></name>
<name><surname>Nadkarni</surname> <given-names>GN</given-names></name>
</person-group>. 
<article-title>The promise of artificial intelligence for kidney pathophysiology</article-title>. <source>Curr Opin Nephrol hypertension</source>. (<year>2022</year>) <volume>31</volume>:<page-range>380&#x2013;6</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1097/MNH.0000000000000808</pub-id>, PMID: <pub-id pub-id-type="pmid">35703218</pub-id>
</mixed-citation>
</ref>
<ref id="B146">
<label>146</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Huo</surname> <given-names>Y</given-names></name>
<name><surname>Deng</surname> <given-names>R</given-names></name>
<name><surname>Liu</surname> <given-names>Q</given-names></name>
<name><surname>Fogo</surname> <given-names>AB</given-names></name>
<name><surname>Yang</surname> <given-names>H</given-names></name>
</person-group>. 
<article-title>AI applications in renal pathology</article-title>. <source>Kidney Int</source>. (<year>2021</year>) <volume>99</volume>:<page-range>1309&#x2013;20</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.kint.2021.01.015</pub-id>, PMID: <pub-id pub-id-type="pmid">33581198</pub-id>
</mixed-citation>
</ref>
<ref id="B147">
<label>147</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhou</surname> <given-names>XJ</given-names></name>
<name><surname>Zhong</surname> <given-names>XH</given-names></name>
<name><surname>Duan</surname> <given-names>LX</given-names></name>
</person-group>. 
<article-title>Integration of artificial intelligence and multi-omics in kidney diseases</article-title>. <source>Fundam Res</source>. (<year>2022</year>) <volume>3</volume>:<page-range>126&#x2013;48</page-range>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.fmre.2022.01.037</pub-id>, PMID: <pub-id pub-id-type="pmid">38933564</pub-id>
</mixed-citation>
</ref>
<ref id="B148">
<label>148</label>
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lin</surname> <given-names>Z</given-names></name>
<name><surname>Li</surname> <given-names>S</given-names></name>
<name><surname>Wang</surname> <given-names>S</given-names></name>
<name><surname>Gao</surname> <given-names>Z</given-names></name>
<name><surname>Sun</surname> <given-names>Y</given-names></name>
<name><surname>Lam</surname> <given-names>CT</given-names></name>
<etal/>
</person-group>. 
<article-title>An orchestration learning framework for ultrasound imaging: Prompt-Guided Hyper-Perception and Attention-Matching Downstream Synchronization</article-title>. <source>Med image Anal</source>. (<year>2025</year>) <volume>104</volume>:<elocation-id>103639</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.media.2025.103639</pub-id>, PMID: <pub-id pub-id-type="pmid">40441046</pub-id>
</mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn id="n1" fn-type="custom" custom-type="edited-by">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/38262">Ronald M. Bukowski</ext-link>, Cleveland Clinic, United States</p></fn>
<fn id="n2" fn-type="custom" custom-type="reviewed-by">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1721651">Chen Yu</ext-link>, Tongji University, China</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2246557">Bartosz Malkiewicz</ext-link>, Wroclaw Medical University, Poland</p></fn>
</fn-group>
</back>
</article>