<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Med.</journal-id>
<journal-title>Frontiers in Medicine</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Med.</abbrev-journal-title>
<issn pub-type="epub">2296-858X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fmed.2022.894430</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Medicine</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Building Efficient CNN Architectures for Histopathology Images Analysis: A Case-Study in Tumor-Infiltrating Lymphocytes Classification</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Meirelles</surname> <given-names>Andr&#x000E9; L. S.</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1717746/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Kurc</surname> <given-names>Tahsin</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Kong</surname> <given-names>Jun</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/907505/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Ferreira</surname> <given-names>Renato</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Saltz</surname> <given-names>Joel H.</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Teodoro</surname> <given-names>George</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1532880/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Department of Computer Science, Universidade de Bras&#x000ED;lia</institution>, <addr-line>Bras&#x000ED;lia</addr-line>, <country>Brazil</country></aff>
<aff id="aff2"><sup>2</sup><institution>Biomedical Informatics Department, Stony Brook University</institution>, <addr-line>Stony Brook, NY</addr-line>, <country>United States</country></aff>
<aff id="aff3"><sup>3</sup><institution>Department of Mathematics and Statistics and Computer Science, Georgia State University</institution>, <addr-line>Atlanta, GA</addr-line>, <country>United States</country></aff>
<aff id="aff4"><sup>4</sup><institution>Department of Computer Science, Universidade Federal de Minas Gerais</institution>, <addr-line>Belo Horizonte</addr-line>, <country>Brazil</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Dachuan Zhang, First People&#x00027;s Hospital of Changzhou, China</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Guotai Wang, University of Electronic Science and Technology of China, China; Arkadiusz Gertych, Cedars Sinai Medical Center, United States</p></fn>
<corresp id="c001">&#x0002A;Correspondence: George Teodoro <email>george&#x00040;dcc.ufmg.br</email></corresp>
<fn fn-type="other" id="fn001"><p>This article was submitted to Pathology, a section of the journal Frontiers in Medicine</p></fn></author-notes>
<pub-date pub-type="epub">
<day>31</day>
<month>05</month>
<year>2022</year>
</pub-date>
<pub-date pub-type="collection">
<year>2022</year>
</pub-date>
<volume>9</volume>
<elocation-id>894430</elocation-id>
<history>
<date date-type="received">
<day>11</day>
<month>03</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>11</day>
<month>05</month>
<year>2022</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x000A9; 2022 Meirelles, Kurc, Kong, Ferreira, Saltz and Teodoro.</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Meirelles, Kurc, Kong, Ferreira, Saltz and Teodoro</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license></permissions>
<abstract>
<sec>
<title>Background</title>
<p>Deep learning methods have demonstrated remarkable performance in pathology image analysis, but they are computationally very demanding. The aim of our study is to reduce their computational cost to enable their use with large tissue image datasets.</p>
</sec>
<sec>
<title>Methods</title>
<p>We propose a method called Network Auto-Reduction (NAR) that simplifies a Convolutional Neural Network (CNN) by reducing the network to minimize the computational cost of doing a prediction. NAR performs a compound scaling in which the width, depth, and resolution dimensions of the network are reduced together to maintain a balance among them in the resulting simplified network. We compare our method with a state-of-the-art solution called ResRep. The evaluation is carried out with popular CNN architectures and a real-world application that identifies distributions of tumor-infiltrating lymphocytes in tissue images.</p>
</sec>
<sec>
<title>Results</title>
<p>The experimental results show that both ResRep and NAR are able to generate simplified, more efficient versions of ResNet50 V2. The simplified versions by ResRep and NAR require 1.32&#x000D7; and 3.26&#x000D7; fewer floating-point operations (FLOPs), respectively, than the original network without a loss in classification power as measured by the Area under the Curve (AUC) metric. When applied to a deeper and more computationally expensive network, Inception V4, NAR is able to generate a version that requires 4&#x000D7; fewer FLOPs than the original version with the same AUC performance.</p>
</sec>
<sec>
<title>Conclusions</title>
<p>NAR is able to achieve substantial reductions in the execution cost of two popular CNN architectures, while resulting in small or no loss in model accuracy. Such cost savings can significantly improve the use of deep learning methods in digital pathology. They can enable studies with larger tissue image datasets and facilitate the use of less expensive and more accessible graphics processing units (GPUs), thus reducing the computing costs of a study.</p>
</sec></abstract>
<kwd-group>
<kwd>digital pathology</kwd>
<kwd>deep learning</kwd>
<kwd>CNN simplification</kwd>
<kwd>tumor-infiltrating lymphocytes</kwd>
<kwd>efficient CNNs</kwd>
</kwd-group>
<contract-num rid="cn001">1UG3CA225021</contract-num>
<contract-num rid="cn003">R01LM011119-01</contract-num>
<contract-num rid="cn003">R01LM009239</contract-num>
<contract-num rid="cn007">K25CA181503</contract-num>
<contract-num rid="cn007">U01CA242936</contract-num>
<contract-sponsor id="cn001">National Cancer Institute<named-content content-type="fundref-id">10.13039/100000054</named-content></contract-sponsor>
<contract-sponsor id="cn002">Funda&#x000E7;&#x000E3;o de Amparo &#x000E0; Pesquisa do Estado de Minas Gerais<named-content content-type="fundref-id">10.13039/501100004901</named-content></contract-sponsor>
<contract-sponsor id="cn003">U.S. National Library of Medicine<named-content content-type="fundref-id">10.13039/100000092</named-content></contract-sponsor>
<contract-sponsor id="cn004">Conselho Nacional de Desenvolvimento Cient&#x000ED;fico e Tecnol&#x000F3;gico<named-content content-type="fundref-id">10.13039/501100003593</named-content></contract-sponsor>
<contract-sponsor id="cn005">Coordena&#x000E7;&#x000E3;o de Aperfei&#x000E7;oamento de Pessoal de N&#x000ED;vel Superior<named-content content-type="fundref-id">10.13039/501100002322</named-content></contract-sponsor>
<contract-sponsor id="cn006">Pr&#x000F3;-Reitoria de Pesquisa, Universidade Federal de Minas Gerais<named-content content-type="fundref-id">10.13039/501100007375</named-content></contract-sponsor>
<contract-sponsor id="cn007">National Institutes of Health<named-content content-type="fundref-id">10.13039/100000002</named-content></contract-sponsor>
<counts>
<fig-count count="2"/>
<table-count count="4"/>
<equation-count count="4"/>
<ref-count count="60"/>
<page-count count="10"/>
<word-count count="7704"/>
</counts>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="s1">
<title>1. Introduction</title>
<p>Pathology image analysis is quickly evolving thanks to advances in scanner technologies that now enable rapidly digitizing glass slides into high resolution whole slide images (WSIs). This has also been followed by several developments in computer aided diagnosis analysis tools and methods, which have improved the use of information computed from tissue characteristics in WSIs in disease classification, prediction of clinical outcomes, etc. (<xref ref-type="bibr" rid="B1">1</xref>&#x02013;<xref ref-type="bibr" rid="B3">3</xref>). Deep learning methods have demonstrated significant improvements over traditional machine learning and other image analysis methods in a wide range of tissue image analysis tasks (<xref ref-type="bibr" rid="B4">4</xref>&#x02013;<xref ref-type="bibr" rid="B10">10</xref>). Consequently, deep learning-based image analysis is rapidly becoming a mainstream approach in digital pathology.</p>
<p>The advances attained with the deep learning methods have also been accompanied by multiple challenges in order to make them more routinely used in pathology image analysis. For instance, these methods require a significant amount of annotated data to be used in training, which is particularly costly in digital pathology as it requires an expert pathologist to manually annotate large volumes of data (<xref ref-type="bibr" rid="B11">11</xref>, <xref ref-type="bibr" rid="B12">12</xref>). Also, applications developed with deep learning should consider explainability to improve confidence in their use (<xref ref-type="bibr" rid="B13">13</xref>, <xref ref-type="bibr" rid="B14">14</xref>).</p>
<p>We address another challenge with application of deep learning in digital pathology: the high computational cost of deep learning inference, which has adversely impacted the effective use of deep learning in many application domains (<xref ref-type="bibr" rid="B15">15</xref>). This problem is particularly pronounced in digital pathology because WSIs are extremely high resolution images (in the range of 100K &#x000D7; 100K pixels). A study analyzing thousands of WSIs would require substantial computing capacity. High computing requirements can significantly limit the use of deep learning in research and as a routine component of digital pathology workflows.</p>
<p>The demanding computational costs of deep learning models can be addressed by CNN simplification and acceleration techniques, such as: network pruning (<xref ref-type="bibr" rid="B16">16</xref>&#x02013;<xref ref-type="bibr" rid="B18">18</xref>), sparsification (<xref ref-type="bibr" rid="B19">19</xref>, <xref ref-type="bibr" rid="B20">20</xref>), quantization (<xref ref-type="bibr" rid="B21">21</xref>, <xref ref-type="bibr" rid="B22">22</xref>), etc. Among network pruning solutions, there are those that concentrate on removing filters in the convolutional layers, which are referred to as channel or filter pruning (<xref ref-type="bibr" rid="B23">23</xref>&#x02013;<xref ref-type="bibr" rid="B25">25</xref>). Other techniques act on a broader range of structures, removing full layers or even blocks of layers (<xref ref-type="bibr" rid="B26">26</xref>).</p>
<p>Network pruning solutions have been the focus of a number of publications, presenting good results in CNN speedup and also enabling lossless model compression (<xref ref-type="bibr" rid="B27">27</xref>). Filter pruning techniques and network pruning in general offer varying possibilities to select which filters from which layers should be excluded from the network or which structures to be removed. However, this is not performed in a balanced manner taking into consideration all model dimensions together, which may limit the performance and accuracy of the reduced network (<xref ref-type="bibr" rid="B28">28</xref>&#x02013;<xref ref-type="bibr" rid="B30">30</xref>).</p>
<p>In this work, we present a novel approach that can generate more efficient Convolutional Neural Network (CNN) architectures to speed up the execution of model training and inference. Our approach, called Network Auto-Reduction (NAR), performs transformations in a given CNN architecture in order to reduce its width, depth, and resolution dimensions (also called components) to generate a novel architecture with the desired computational cost (in terms of number of FLOPs) and with minimal loss of accuracy. This simplification employs a compound scaling method with a set of fixed scaling coefficients. The goal is to maintain a balance among the components of the network&#x02014;for instance, a larger input resolution would require more receptive fields and a larger number of channels to capture details of the input image as is theoretically shown in (<xref ref-type="bibr" rid="B28">28</xref>). NAR differs from most of the previous works that focus on reducing a single or a couple of the dimensions of the network (<xref ref-type="bibr" rid="B25">25</xref>, <xref ref-type="bibr" rid="B27">27</xref>, <xref ref-type="bibr" rid="B29">29</xref>, <xref ref-type="bibr" rid="B31">31</xref>&#x02013;<xref ref-type="bibr" rid="B33">33</xref>).</p>
<p>We experimentally evaluate our approach in a real-world application that classifies tumor-infiltrating lymphocytes (TILs) in WSIs (<xref ref-type="bibr" rid="B34">34</xref>, <xref ref-type="bibr" rid="B35">35</xref>) (presented in Section 2.1). TILs are a type of white blood cells in the immune system, whose patterns found in the tissue images have been shown to have consistent correlations with patient overall survival in multiple cancer types (<xref ref-type="bibr" rid="B36">36</xref>&#x02013;<xref ref-type="bibr" rid="B40">40</xref>). In our evaluation, we use ResNet50 V2 and Inception V4 as full, baseline networks and simplify them with NAR. We compare NAR to a state-of-the-art method, called ResRep (<xref ref-type="bibr" rid="B27">27</xref>). ResRep is designed to carry out lossless channel pruning (filter pruning) to slim down a CNN through a reduction in the width or number of output channels of convolutional layers. The experimental evaluation shows that NAR can generate CNNs with demands up to 4 &#x000D7; lower than the original CNN, while delivering the same classification quality (AUC). The simplified networks generated by NAR are more efficient, with smaller requirements for the same AUC values when compared with the networks generated by ResRep.</p>
<p>The rest of this document is organized as follows: Section 2 presents the motivating TIL classification application, the NAR strategy proposed here and summarizes the ResRep approach. Section 3 shows the performance evaluation in detail and Section 4 discusses the main findings and promising directions for future work.</p>
</sec>
<sec sec-type="materials and methods" id="s2">
<title>2. Materials and Methods</title>
<sec>
<title>2.1. Tumor-Infiltrating Lymphocytes (TIL) Classification Using Deep Learning</title>
<p>This work is motivated by analyses carried with deep learning models of WSIs to identify and classify spatial patterns of TILs (<xref ref-type="bibr" rid="B34">34</xref>, <xref ref-type="bibr" rid="B35">35</xref>). There is increasing evidence that TIL patterns in cancer tissue correlate with clinical outcomes; for example, high densities of TILs indicate favorable outcomes, such as longer survivals for patients (<xref ref-type="bibr" rid="B37">37</xref>). Quantitative analyses of TIL patterns can provide valuable information about interactions between cancer and immune system and novel bio-markers for prediction of prognosis and treatment response.</p>
<p>WSIs allow a researcher to carry out quantitative investigations of the tumor microenvironment at the subcellular level. This has motivated the development of image analysis methods to extract and characterize quantitative imaging features from WSIs (<xref ref-type="bibr" rid="B1">1</xref>&#x02013;<xref ref-type="bibr" rid="B3">3</xref>, <xref ref-type="bibr" rid="B41">41</xref>, <xref ref-type="bibr" rid="B42">42</xref>). Deep learning methods based on Convolutional Neural Networks (CNNs) have emerged as an effective approach for image analysis in several domains. CNNs have been employed for a variety of tissue image analysis tasks, including object identification, segmentation, and recognition of spatial patterns (<xref ref-type="bibr" rid="B34">34</xref>, <xref ref-type="bibr" rid="B43">43</xref>&#x02013;<xref ref-type="bibr" rid="B49">49</xref>).</p>
<p><xref ref-type="fig" rid="F1">Figure 1</xref> shows a TIL analysis pipeline, based on the work done in (<xref ref-type="bibr" rid="B34">34</xref>), that predicts distributions of TILs in images of hematoxylin and eosin (H&#x00026;E) stained tissue specimens. In this pipeline, an input image is partitioned into small patches&#x02014;the size of a patch is 50 &#x000D7; 50 square microns in our application. A CNN classification model classifies the patches into TIL-positive and TIL-negative classes (a binary classification operation). As is shown in the figure, the pipeline is composed of a training phase and a prediction phase. In the training phase (shown in the top), the CNN learns to classify input image patches. In this process, patches are extracted from multiple WSIs, pathologists review and annotate them, and the CNN classification model is trained. The selection of patches and model training is repeated until the desired accuracy level is reached. The prediction phase (bottom part of the image) applies the trained model to input patches from unseen WSIs to compute TIL maps that identify tissue regions with TILs&#x02014;TIL-positive patches are shown as Red dots on a Blue background, which represents tissue.</p>
<fig id="F1" position="float">
<label>Figure 1</label>
<caption><p>Use-case TIL analysis workflow. CNN is trained to identify TIL rich tissue based on patches annotated by expert pathologist (top). The CNN model is then used to classify input WSI in a patch basis. The result is a TIL map presenting TIL rich regions in the input tissue.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmed-09-894430-g0001.tif"/>
</fig>
<p>While CNNs have been applied successfully for TIL analysis (<xref ref-type="bibr" rid="B34">34</xref>, <xref ref-type="bibr" rid="B35">35</xref>), scaling the analysis to thousands of WSIs is challenging, because of the CNNs high computational cost. This poses a major limitation to a broader adoption of CNN-based methods in the digital pathology domain. We propose a method that intelligently simplifies a CNN to reduce its computational cost while minimizing loss of model accuracy. The proposed method is discussed in the next section.</p>
</sec>
<sec>
<title>2.2. Network Auto-Reduction (NAR)</title>
<p>We propose Network Auto-Reduction (NAR) to simplify CNNs and reduce their execution cost in the inference (prediction) phase. Several approaches have been proposed for CNN simplification. Most of the prior approaches aim to reduce one of the dimensions of the CNN: depth, width or input resolution (<xref ref-type="bibr" rid="B27">27</xref>). Some studies proposed removing specific CNN filters (<xref ref-type="bibr" rid="B25">25</xref>, <xref ref-type="bibr" rid="B29">29</xref>, <xref ref-type="bibr" rid="B31">31</xref>&#x02013;<xref ref-type="bibr" rid="B33">33</xref>), or introducing weight sparsity (<xref ref-type="bibr" rid="B18">18</xref>) or applying a combination of both (<xref ref-type="bibr" rid="B26">26</xref>, <xref ref-type="bibr" rid="B27">27</xref>). In most of those cases, the CNN is re-trained multiple times while the reduction operations are iteratively applied. This is computationally expensive and may not even be feasible in applications that employ large training datasets.</p>
<p>NAR simplifies a CNN by modifying the depth, width, and input resolution of the model together. The goal is to maintain a balance between network building blocks in order for the simplified CNN to attain good accuracy, as demonstrated in previous work (<xref ref-type="bibr" rid="B28">28</xref>, <xref ref-type="bibr" rid="B50">50</xref>, <xref ref-type="bibr" rid="B51">51</xref>). The compound simplification process is illustrated in <xref ref-type="fig" rid="F2">Figure 2</xref>.</p>
<fig id="F2" position="float">
<label>Figure 2</label>
<caption><p>NAR compound CNN simplification modifies depth, width, and input resolution in order to have a balance among CNN components.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmed-09-894430-g0002.tif"/>
</fig>
<p>Our method is inspired by the approach proposed by Tan et al. (<xref ref-type="bibr" rid="B52">52</xref>) to scale up simple CNNs. Their method was designed to increase the size of a simple CNN in order to improve its prediction performance. Here, on the other hand, we address the problem of simplifying a CNN that is already known to perform well in the target domain, but has a high computation cost. Tan et al. (<xref ref-type="bibr" rid="B52">52</xref>) formulated the problem of scaling-up a CNN as an optimization problem defined in Equation (1), given a target memory consumption (TM) and a target number of floating-point operations (TF):</p>
<disp-formula id="E1"><label>(1)</label><mml:math id="M1"><mml:mtable columnalign='left'><mml:mtr><mml:mtd><mml:mtext>&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;</mml:mtext><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>d</mml:mi><mml:mo>,</mml:mo><mml:mi>w</mml:mi><mml:mo>,</mml:mo><mml:mi>r</mml:mi></mml:mrow></mml:msub><mml:mtext>&#x02009;&#x02009;</mml:mtext><mml:mi>A</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:mi>u</mml:mi><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>y</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>M</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>d</mml:mi><mml:mo>,</mml:mo><mml:mi>w</mml:mi><mml:mo>,</mml:mo><mml:mi>r</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo stretchy='false'>)</mml:mo></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mtext>&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;</mml:mtext><mml:mi>s</mml:mi><mml:mo>.</mml:mo><mml:mi>t</mml:mi><mml:mtext>&#x02009;&#x02009;</mml:mtext><mml:mi>M</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>d</mml:mi><mml:mo>,</mml:mo><mml:mi>w</mml:mi><mml:mo>,</mml:mo><mml:mi>r</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mtext>&#x02009;</mml:mtext><mml:mo>=</mml:mo><mml:mtext>&#x02009;</mml:mtext><mml:munder><mml:mstyle mathsize='140%' displaystyle='true'><mml:mo>&#x02299;</mml:mo></mml:mstyle><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>...</mml:mn><mml:mo>,</mml:mo><mml:mi>s</mml:mi></mml:mrow></mml:munder><mml:msubsup><mml:mover accent='true'><mml:mi>&#x02131;</mml:mi><mml:mo stretchy='true'>&#x0005E;</mml:mo></mml:mover><mml:mi>i</mml:mi><mml:mrow><mml:mi>d</mml:mi><mml:mo>.</mml:mo><mml:msub><mml:mover 
accent='true'><mml:mi>L</mml:mi><mml:mo>&#x0005E;</mml:mo></mml:mover><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:msubsup><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mo>&#x02329;</mml:mo><mml:mi>r</mml:mi><mml:mo>.</mml:mo><mml:msup><mml:mover accent='true'><mml:mi>H</mml:mi><mml:mo>&#x0005E;</mml:mo></mml:mover><mml:mi>i</mml:mi></mml:msup><mml:mo>,</mml:mo><mml:mi>r</mml:mi><mml:mo>.</mml:mo><mml:msup><mml:mover accent='true'><mml:mi>W</mml:mi><mml:mo>&#x0005E;</mml:mo></mml:mover><mml:mi>i</mml:mi></mml:msup><mml:mo>,</mml:mo><mml:mi>w</mml:mi><mml:mo>.</mml:mo><mml:msup><mml:mover accent='true'><mml:mi>C</mml:mi><mml:mo>&#x0005E;</mml:mo></mml:mover><mml:mi>i</mml:mi></mml:msup><mml:mo>&#x0232A;</mml:mo></mml:mrow></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mi>M</mml:mi><mml:mi>e</mml:mi><mml:mi>m</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>y</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>M</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mtext>&#x02009;</mml:mtext><mml:mo>&#x02264;</mml:mo><mml:mtext>&#x02009;</mml:mtext><mml:mi>T</mml:mi><mml:mi>M</mml:mi><mml:mo>;</mml:mo></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mtext>&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;&#x000A0;</mml:mtext><mml:mo stretchy='false'>(</mml:mo><mml:mi>M</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mtext>&#x02009;</mml:mtext><mml:mo>&#x02264;</mml:mo><mml:mtext>&#x02009;</mml:mtext><mml:mi>T</mml:mi><mml:mi>F</mml:mi><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<p>Here, &#x02299;<sub><italic>i</italic> &#x0003D; 1, ..., <italic>s</italic></sub> is the composition of the layers of a given CNN <italic>M</italic>. Each layer <italic>i</italic> can be viewed as the application of function <inline-formula><mml:math id="M2"><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>F</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> on its input tensor <italic>X</italic><sub><italic>i</italic></sub>, with dimensions &#x00124;<sup><italic>i</italic></sup>, &#x00174;<sup><italic>i</italic></sup>, &#x00108;<sup><italic>i</italic></sup> (height, width, channels). The layers can be repeated in a sequence of <inline-formula><mml:math id="M3"><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>L</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> occurrences. The transformation process changes all three components of a network simultaneously, <italic>depth</italic> (the number of layers <inline-formula><mml:math id="M4"><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>L</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>), <italic>width</italic> (the number of channels &#x00108;<sub><italic>i</italic></sub>) and <italic>resolution</italic> (the height &#x00124;<sub><italic>i</italic></sub> and width &#x00174;<sub><italic>i</italic></sub> of tensor <italic>X</italic><sub><italic>i</italic></sub>) in a balanced way. 
The scaling coefficients <italic>d, w, r</italic> used by Tan et al., enabled creating a bigger network <italic>M</italic> with <inline-formula><mml:math id="M5"><mml:mi>d</mml:mi><mml:mo>.</mml:mo><mml:msup><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>L</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> occurrences of layer <italic>i</italic> and input size <italic>r</italic>.&#x00124;<sup><italic>i</italic></sup>, <italic>r</italic>.&#x00174;<sup><italic>i</italic></sup>, <italic>w</italic>.&#x00108;<sup><italic>i</italic></sup>, except for layer <italic>i</italic> &#x0003D; 0, in which the input dimensions are the same as the input image dimensions and channels. For given values of <italic>d, w, r</italic>, the cost of the scaled-up CNN is increased proportionally to <italic>d</italic>.<italic>w</italic><sup>2</sup>.<italic>r</italic><sup>2</sup>.</p>
<p>According to Tan et al., it is critical to balance the scaling coefficients in order to obtain the best accuracy/efficiency relation for a given resource constraint. To that end, a uniform compound scaling strategy is used to distribute the cost increase among these parameters through a &#x003D5; coefficient, such that <italic>d</italic> &#x0003D; &#x003B1;<sup>&#x003D5;</sup>, <italic>w</italic> &#x0003D; &#x003B2;<sup>&#x003D5;</sup>, and <italic>r</italic> &#x0003D; &#x003B3;<sup>&#x003D5;</sup> with a restriction that &#x003B1;.&#x003B2;<sup>2</sup>.&#x003B3;<sup>2</sup> &#x02248;2. The values of &#x003B1;, &#x003B2;, and &#x003B3; that produce the best accuracy are determined by a model grid search (<xref ref-type="bibr" rid="B52">52</xref>).</p>
<p>In NAR, we apply a reduction factor to each CNN component such that <italic>d</italic> &#x0003D; &#x003B1;<sup>&#x02212;&#x003D5;</sup>, <italic>w</italic> &#x0003D; &#x003B2;<sup>&#x02212;&#x003D5;</sup>, <italic>and r</italic> &#x0003D; &#x003B3;<sup>&#x02212;&#x003D5;</sup> with the same restriction valid for &#x003B1;, &#x003B2;, and &#x003B3;. This results in a theoretical reduction of <inline-formula><mml:math id="M6"><mml:mfrac><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mn>2</mml:mn></mml:mrow><mml:mrow><mml:mi>&#x003D5;</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:mfrac></mml:math></inline-formula> for every value of &#x003D5;. Therefore, NAR generates reduced versions of any block based CNN.</p>
</sec>
<sec>
<title>2.3. ResRep</title>
<p>ResRep (<xref ref-type="bibr" rid="B27">27</xref>) is a state-of-the-art CNN pruning strategy that uses structural re-parameterization to reduce a network&#x00027;s width. It implements a two step solution, referred to as <italic>remembering</italic> and <italic>forgetting</italic> steps inspired by neurobiology research. In the remembering step, the network is trained with the addition of <italic>compactor</italic> layers attached to the original convolutional layers. The goal is to identify filters that contribute little to the learning process. The compactors are 1 &#x000D7; 1 convolutional layers that apply gradient penalties, making some channels&#x00027; gradients approach zero. The forgetting step is executed after the remembering step and reconstructs the original model based on the compactor trained network, but without some channels.</p>
<p>A key feature of ResRep is the mechanism by which channels are selected to be removed from the original network. The selection process uses a &#x0201C;gradient resetting&#x0201D; scheme, applied to the compactors&#x00027; gradients only. A group Lasso penalty is used in conjunction with the training objective function to produce a channel-wide sparsity. The gradient resetting operation is formulated in Equations (2) and (3).</p>
<disp-formula id="E2"><label>(2)</label><mml:math id="M7"><mml:mtable class="eqnarray" columnalign="right center left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mi>L</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mi>o</mml:mi><mml:mi>t</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>X</mml:mi><mml:mo>,</mml:mo><mml:mi>Y</mml:mi><mml:mo>,</mml:mo><mml:mtext>&#x00398;</mml:mtext></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mtext>&#x000A0;</mml:mtext><mml:mo>=</mml:mo><mml:mtext>&#x000A0;</mml:mtext><mml:msub><mml:mrow><mml:mi>L</mml:mi></mml:mrow><mml:mrow><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>f</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>X</mml:mi><mml:mo>,</mml:mo><mml:mi>Y</mml:mi><mml:mo>,</mml:mo><mml:mtext>&#x00398;</mml:mtext></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mtext>&#x000A0;</mml:mtext><mml:mo>&#x0002B;</mml:mo><mml:mtext>&#x000A0;&#x003BB;</mml:mtext><mml:mi>P</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>K</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<disp-formula id="E3"><label>(3)</label><mml:math id="M8"><mml:mtable columnalign='left'><mml:mtr><mml:mtd><mml:mi>G</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>F</mml:mi></mml:mstyle><mml:mo stretchy='false'>)</mml:mo><mml:mtext>&#x000A0;</mml:mtext><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mtext>&#x000A0;</mml:mtext><mml:mo>&#x02202;</mml:mo><mml:msub><mml:mi>L</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mi>o</mml:mi><mml:mi>t</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi></mml:mrow></mml:msub><mml:mo stretchy='false'>(</mml:mo><mml:mi>X</mml:mi><mml:mo>,</mml:mo><mml:mi>Y</mml:mi><mml:mo>,</mml:mo><mml:mtext>&#x00398;</mml:mtext><mml:mo stretchy='false'>)</mml:mo></mml:mrow><mml:mrow><mml:mo>&#x02202;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>F</mml:mi></mml:mstyle></mml:mrow></mml:mfrac><mml:mo>&#x02190;</mml:mo><mml:mfrac><mml:mrow><mml:mo>&#x02202;</mml:mo><mml:msub><mml:mi>L</mml:mi><mml:mrow><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>f</mml:mi></mml:mrow></mml:msub><mml:mo stretchy='false'>(</mml:mo><mml:mi>X</mml:mi><mml:mo>,</mml:mo><mml:mi>Y</mml:mi><mml:mo>,</mml:mo><mml:mtext>&#x00398;</mml:mtext><mml:mo stretchy='false'>)</mml:mo></mml:mrow><mml:mrow><mml:mo>&#x02202;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>F</mml:mi></mml:mstyle></mml:mrow></mml:mfrac></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mo>*</mml:mo><mml:mi>m</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:mo>+</mml:mo><mml:mtext>&#x000A0;&#x003BB;</mml:mtext><mml:mfrac><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>F</mml:mi></mml:mstyle><mml:mrow><mml:mo>&#x0007C;</mml:mo><mml:mo>&#x0007C;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>F</mml:mi></mml:mstyle><mml:mo>&#x0007C;</mml:mo><mml:msub><mml:mo>&#x0007C;</mml:mo><mml:mi>E</mml:mi></mml:msub></mml:mrow></mml:mfrac><mml:mo>.</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<p>Here, <italic>L</italic><sub><italic>total</italic></sub> is the objective function applied to input X with label Y, given current network weights &#x00398;. The &#x003BB; is a penalty strength factor and <italic>P</italic>(<italic>K</italic>) is the Lasso penalty added to the regular cost function <italic>L</italic><sub><italic>perf</italic></sub>. The gradients for each filter (<bold>F</bold>) of the convolutional layer may be zeroed with a binary mask <italic>m</italic>. The final gradient <italic>G</italic>(<bold>F</bold>) is compared to a threshold value (&#x003F5;). If it is below the threshold, the filter is removed. It is expected that <italic>G</italic>(<bold>F</bold>) will be close to zero for filters for which the binary mask <italic>m</italic> is 0, since only the penalties are considered.</p>
</sec>
</sec>
<sec sec-type="results" id="s3">
<title>3. Results</title>
<p>The network cost reduction techniques were evaluated with the TIL classification application described in Section 2.1 and two popular CNN architectures, ResNet50 V2 (<xref ref-type="bibr" rid="B53">53</xref>) and Inception V4 (<xref ref-type="bibr" rid="B54">54</xref>)&#x02014;the two CNNs had been successfully employed for whole slide image analysis in a previous work (<xref ref-type="bibr" rid="B35">35</xref>). The CNNs were trained with 4,300 image patches extracted from a set of 56 WSIs from 10 tumor tissue types, including breast, prostate and pancreatic cancer, in The Cancer Genome Atlas (TCGA) repository (<xref ref-type="bibr" rid="B55">55</xref>). Fifteen thousand patches extracted from another set of 5 WSIs comprised the test dataset. The full list of the WSIs is given in <xref ref-type="supplementary-material" rid="SM1">Supplementary Tables 3</xref>, <xref ref-type="supplementary-material" rid="SM1">4</xref>, which also include the percentage of TIL positive patches in each WSI. The images were downloaded in their native Aperio SVS file format. SVS files have a hierarchical representation that stores multiple resolutions of the same image. We used the highest resolution available for each WSI. If an image is obtained at 40x or 20x magnifications, the physical dimensions of a pixel are 0.25&#x000D7;0.25 &#x003BC;m or 0.5&#x000D7;0.5 &#x003BC;m, respectively. We employed the OpenSlide library (<ext-link ext-link-type="uri" xlink:href="http://openslide.org/formats/aperio/">http://openslide.org/formats/aperio/</ext-link>) to read the images and extract patches. The images along with their TIL classification (Map) are publicly available (<ext-link ext-link-type="uri" xlink:href="https://cancerimagingarchive.net/datascope/TCGA_TilMap/">https://cancerimagingarchive.net/datascope/TCGA_TilMap/</ext-link>).</p>
<p>In all of the original and simplified CNN configurations, an input image patch covers a tissue area of 50&#x000D7;50 &#x003BC;m, which was resized to the expected input image size of each CNN. The number of patches that a CNN has to process to analyze a WSI is the same as the other CNNs, regardless of the input size required by each CNN.</p>
<p>The deep learning models were trained and tested on a machine running Linux, equipped with 2 Intel Xeon Gold 6248 &#x0201C;Cascade Lake&#x0201D; CPUs (with 20 cores each), 512 GB of DDR4 RAM, and an NVIDIA Tesla V100 GPU with 32 GB of dedicated memory. In all of the experiments, the models were trained from scratch for a varying number of epochs (50 for NAR and 180 for ResRep, which requires a larger number of epochs to simplify the CNN) using Adam optimization algorithm, a learning rate of 0.0005, and weight decay of 0.0005. StepLR was used as learning rate scheduler for ResRep, with step size of 5 epochs and gamma as 0.5 (learning rate reduction factor). In addition to NAR and ResRep, we have also evaluated a reduction strategy in which only the input image is reduced. This strategy is called input reduction (IR). With IR, we evaluated the impact of the compound reduction implemented by NAR against input data reduction only. The IR strategy results in smaller feature maps in memory but does not require changes to the CNN architecture, which remains exactly the same as the original.</p>
<p>The classification performances of the models trained with the simplified CNNs generated by ResRep and NAR were evaluated using the Area Under the ROC Curve (AUC) metric, the values of which were computed as the mean of values from 3 runs. The values of &#x003B1; &#x0003D; 1.2, &#x003B2; &#x0003D; 1.1 and &#x003B3; &#x0003D; 1.15 used here that led to the best performance were determined using a grid search (<xref ref-type="bibr" rid="B52">52</xref>). The execution cost of each model was measured in terms of the number of Giga floating point operations (GFLOPs) required to process a given input patch covering an area of 50&#x000D7;50 &#x003BC;m. The total count considers both convolutional and dense layers, given, respectively, by the relations <italic>F</italic><sub><italic>conv</italic></sub> &#x0003D; 2&#x0002A;<italic>Number of channels</italic>&#x0002A;<italic>Kernel shape</italic>&#x0002A;<italic>Output shape</italic> and <italic>F</italic><sub><italic>dense</italic></sub> &#x0003D; 2&#x0002A;<italic>Input size</italic>&#x0002A;<italic>Output size</italic>. The NAR codes were developed using Keras and Tensorflow, while ResRep was implemented with PyTorch.</p>
<sec>
<title>3.1. Simplification of ResNet50 V2 by NAR, ResRep, and IR</title>
<p>This set of experiments compares NAR, ResRep, and the Input Reduction (IR) approaches in simplifying the ResNet50 V2. The value of &#x003D5; in NAR was varied between 1 and 3. Values greater than 3 generated simplified architectures that were purely sequential models that did not resemble the original model at all. Moreover, &#x003D5; &#x0003D; 3 resulted in a significant drop in classification performance.</p>
<p>The simplified CNNs generated by different configurations of NAR and ResRep are summarized in <xref ref-type="table" rid="T1">Tables 1</xref>, <xref ref-type="table" rid="T2">2</xref>, respectively. As is shown in <xref ref-type="table" rid="T1">Table 1</xref>, NAR reduces multiple components of the network; this is illustrated by different number of blocks in each stage and different filter quantity in each convolutional layer. ResRep, on the other hand, primarily prunes the filters in the last stages of the network. In <xref ref-type="table" rid="T2">Table 2</xref>, <italic>P</italic> marks positions where filters have been pruned. The filters in stage 2 are not pruned until &#x003F5; &#x0003D; 0.90 and no filters are pruned in stage 1.</p>
<table-wrap position="float" id="T1">
<label>Table 1</label>
<caption><p>Number of parameters and layers organization in original ResNet50 V2 and NAR simplified networks.</p></caption>
<table frame="hsides" rules="groups">
<thead><tr>
<th valign="top" align="left"><bold>CNN</bold></th>
<th valign="top" align="left"><bold>ORIGINAL</bold></th>
<th valign="top" align="left"><bold>NAR <bold>&#x003D5; &#x0003D; 1</bold></bold></th>
<th valign="top" align="left"><bold>NAR <bold>&#x003D5; &#x0003D; 2</bold></bold></th>
<th valign="top" align="left"><bold>NAR <bold>&#x003D5; &#x0003D; 3</bold></bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">ResNet50 V2 (<xref ref-type="bibr" rid="B53">53</xref>)</td>
<td/>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">&#x00023; Params</td>
<td valign="top" align="left">23,568,898</td>
<td valign="top" align="left">14,583,140</td>
<td valign="top" align="left">11,274,413</td>
<td valign="top" align="left">8,514,988</td>
</tr>
<tr>
<td valign="top" align="left">Conv 1</td>
<td valign="top" align="center" colspan="4">7 &#x000D7; 7, 64, stride 2</td>
</tr>
<tr>
<td valign="top" align="left">Stage 1</td>
<td valign="top" align="left"><inline-formula><mml:math id="M9"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>256</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M10"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>58</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>58</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>232</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 2</td>
<td valign="top" align="left"><inline-formula><mml:math id="M11"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>53</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>53</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>212</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 2</td>
<td valign="top" align="left"><inline-formula><mml:math id="M12"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>48</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>48</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>192</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 2</td>
</tr>
<tr>
<td valign="top" align="left">Stage 2</td>
<td valign="top" align="left"><inline-formula><mml:math id="M13"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>512</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 4</td>
<td valign="top" align="left"><inline-formula><mml:math id="M14"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>106</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>106</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>424</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M15"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>116</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>116</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>464</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M16"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>96</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>96</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>384</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 2</td>
</tr>
<tr>
<td valign="top" align="left">Stage 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M17"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>256</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>256</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1024</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 6</td>
<td valign="top" align="left"><inline-formula><mml:math id="M18"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>233</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>233</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>932</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 5</td>
<td valign="top" align="left"><inline-formula><mml:math id="M19"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>212</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>212</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>848</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 4</td>
<td valign="top" align="left"><inline-formula><mml:math id="M20"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>192</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>192</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>768</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
</tr>
<tr>
<td valign="top" align="left">Stage 4</td>
<td valign="top" align="left"><inline-formula><mml:math id="M21"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>512</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>512</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>2048</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M22"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>465</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>465</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1860</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 2</td>
<td valign="top" align="left"><inline-formula><mml:math id="M23"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>423</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>423</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1692</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 2</td>
<td valign="top" align="left"><inline-formula><mml:math id="M24"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>385</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>385</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1540</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 2</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p><italic>The parameter count considers a binary classification problem</italic>.</p>
</table-wrap-foot>
</table-wrap>
<table-wrap position="float" id="T2">
<label>Table 2</label>
<caption><p>Number of parameters and layers for the ResRep reduced networks (binary classification).</p></caption>
<table frame="hsides" rules="groups">
<thead><tr>
<th valign="top" align="left"><bold>CNN</bold></th>
<th valign="top" align="left"><bold>&#x003F5; &#x0003D; 0.82</bold></th>
<th valign="top" align="left"><bold>&#x003F5; &#x0003D; 0.84</bold></th>
<th valign="top" align="left"><bold>&#x003F5; &#x0003D; 0.86</bold></th>
<th valign="top" align="left"><bold>&#x003F5; &#x0003D; 0.88</bold></th>
<th valign="top" align="left"><bold>&#x003F5; &#x0003D; 0.90</bold></th>
<th valign="top" align="left"><bold>&#x003F5; &#x0003D; 0.92</bold></th>
<th valign="top" align="left"><bold>&#x003F5; &#x0003D; 0.94</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">ResNet50 V2 (<xref ref-type="bibr" rid="B53">53</xref>)</td>
<td/>
<td/>
<td/>
<td/>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">&#x00023; Params</td>
<td valign="top" align="left">12,527,836</td>
<td valign="top" align="left">9,421,008</td>
<td valign="top" align="left">8,663,740</td>
<td valign="top" align="left">9,225,475</td>
<td valign="top" align="left">7,931,287</td>
<td valign="top" align="left">4,882,052</td>
<td valign="top" align="left">4,696,612</td>
</tr>
<tr>
<td valign="top" align="left">Conv 1</td>
<td valign="top" align="left" colspan="7">7 &#x000D7; 7, 64, stride 2</td>
</tr>
<tr>
<td valign="top" align="left">Stage 1</td>
<td valign="top" align="left"><inline-formula><mml:math id="M25"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>256</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M26"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>256</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M27"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>256</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M28"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>256</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M29"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>256</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M30"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>256</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M31"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>64</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>256</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
</tr>
<tr>
<td valign="top" align="left">Stage 2</td>
<td valign="top" align="left"><inline-formula><mml:math id="M32"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>512</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 4</td>
<td valign="top" align="left"><inline-formula><mml:math id="M33"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>512</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 4</td>
<td valign="top" align="left"><inline-formula><mml:math id="M34"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>512</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 4</td>
<td valign="top" align="left"><inline-formula><mml:math id="M35"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>512</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7;4</td>
<td valign="top" align="left"><inline-formula><mml:math id="M36"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>128</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>512</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 4</td>
<td valign="top" align="left"><inline-formula><mml:math id="M37"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>512</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 4</td>
<td valign="top" align="left"><inline-formula><mml:math id="M38"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>512</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 4</td>
</tr>
<tr>
<td valign="top" align="left">Stage 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M39"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1024</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 6</td>
<td valign="top" align="left"><inline-formula><mml:math id="M40"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1024</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 6</td>
<td valign="top" align="left"><inline-formula><mml:math id="M41"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1024</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 6</td>
<td valign="top" align="left"><inline-formula><mml:math id="M42"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1024</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 6</td>
<td valign="top" align="left"><inline-formula><mml:math id="M43"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1024</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 6</td>
<td valign="top" align="left"><inline-formula><mml:math id="M44"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1024</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 6</td>
<td valign="top" align="left"><inline-formula><mml:math id="M45"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1024</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 6</td>
</tr>
<tr>
<td valign="top" align="left">Stage 4</td>
<td valign="top" align="left"><inline-formula><mml:math id="M46"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>2048</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M47"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>2048</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M48"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>2048</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M49"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>2048</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M50"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>2048</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M51"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>2048</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
<td valign="top" align="left"><inline-formula><mml:math id="M52"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mtable style="text-align:axis;" equalrows="false" columnlines="" equalcolumns="false" class="array"><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>3</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mi>P</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>2048</mml:mn></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> &#x000D7; 3</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p><italic>In each reduction level, P indicates the block position where channels were pruned</italic>.</p>
</table-wrap-foot>
</table-wrap>
<p><xref ref-type="table" rid="T3">Table 3</xref> shows the computational requirements and classification performances of the models generated from the simplified networks. NAR with &#x003D5; &#x0003D; 2 generated a network with 70% reduction in computational requirements compared to the original network. Additionally, the AUC value obtained by the simplified model is the same as that achieved by the original model. ResRep also was able to generate simplified networks with no loss of AUC performance. However, as is shown in the table, these networks had higher computational requirements than the networks generated by NAR. Further, the IR strategy achieved competitive results as compared to ResRep, although it is a relatively simple approach. NAR has attained an overall better performance (smaller G) than IR for the same AUC. Further, it is noticeable that when the input image is reduced below a certain size (e.g., 119&#x000D7;119), the AUC of IR is significantly impacted.</p>
<table-wrap position="float" id="T3">
<label>Table 3</label>
<caption><p>AUC, Giga-FLOPs (GFLOPs) corresponding to model input size, number of parameter layers, and total number of model layers of ResNet50 V2 and of the networks simplified by ResRep, IR, and NAR.</p></caption>
<table frame="hsides" rules="groups">
<thead><tr>
<th valign="top" align="left"><bold>CNN</bold></th>
<th valign="top" align="center"><bold>AUC</bold></th>
<th valign="top" align="center"><bold>G</bold></th>
<th valign="top" align="center"><bold>Input size</bold></th>
<th valign="top" align="center"><bold>Param. layers</bold></th>
<th valign="top" align="center"><bold>&#x00023; of layers</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">ResNet50 V2 (<xref ref-type="bibr" rid="B53">53</xref>)</td>
<td valign="top" align="center">0.86</td>
<td valign="top" align="center">9.65</td>
<td valign="top" align="center">240 &#x000D7;240</td>
<td valign="top" align="center">50</td>
<td valign="top" align="center">225</td>
</tr>
<tr>
<td valign="top" align="left">ResNet ResRep &#x003F5; &#x0003D; 0.82</td>
<td valign="top" align="center">0.87</td>
<td valign="top" align="center">8.34</td>
<td valign="top" align="center">240 &#x000D7;240</td>
<td valign="top" align="center">50</td>
<td valign="top" align="center">225</td>
</tr>
<tr>
<td valign="top" align="left">ResNet ResRep &#x003F5; &#x0003D; 0.84</td>
<td valign="top" align="center">0.82</td>
<td valign="top" align="center">7.91</td>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet ResRep &#x003F5; &#x0003D; 0.86</td>
<td valign="top" align="center">0.84</td>
<td valign="top" align="center">7.63</td>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet ResRep &#x003F5; &#x0003D; 0.88</td>
<td valign="top" align="center">0.81</td>
<td valign="top" align="center">7.68</td>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet ResRep &#x003F5; &#x0003D; 0.90</td>
<td valign="top" align="center"><bold>0.86</bold></td>
<td valign="top" align="center"><bold>7.27</bold></td>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet ResRep &#x003F5; &#x0003D; 0.92</td>
<td valign="top" align="center">0.69</td>
<td valign="top" align="center">6.10</td>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet ResRep &#x003F5; &#x0003D; 0.94</td>
<td valign="top" align="center">0.73</td>
<td valign="top" align="center">6.09</td>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet50 V2 IR 1</td>
<td valign="top" align="center">0.88</td>
<td valign="top" align="center">7.90</td>
<td valign="top" align="center">209 &#x000D7;209</td>
<td valign="top" align="center">50</td>
<td valign="top" align="center">225</td>
</tr>
<tr>
<td valign="top" align="left">ResNet50 V2 IR 2</td>
<td valign="top" align="center">0.88</td>
<td valign="top" align="center">5.83</td>
<td valign="top" align="center">181 &#x000D7;181</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet50 V2 IR 3</td>
<td valign="top" align="center"><bold>0.86</bold></td>
<td valign="top" align="center"><bold>4.24</bold></td>
<td valign="top" align="center">157 &#x000D7;157</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet50 V2 IR 4</td>
<td valign="top" align="center">0.84</td>
<td valign="top" align="center">3.49</td>
<td valign="top" align="center">137 &#x000D7;137</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet50 V2 IR 5</td>
<td valign="top" align="center">0.81</td>
<td valign="top" align="center">2.56</td>
<td valign="top" align="center">119 &#x000D7;119</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet50 V2 IR 6</td>
<td valign="top" align="center">0.79</td>
<td valign="top" align="center">2.03</td>
<td valign="top" align="center">104 &#x000D7;104</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">ResNet NAR &#x003D5; &#x0003D; 1</td>
<td valign="top" align="center">0.84</td>
<td valign="top" align="center">5.15</td>
<td valign="top" align="center">209 &#x000D7;209</td>
<td valign="top" align="center">42</td>
<td valign="top" align="center">170</td>
</tr>
<tr>
<td valign="top" align="left">ResNet NAR &#x003D5; &#x0003D; 2</td>
<td valign="top" align="center"><bold>0.86</bold></td>
<td valign="top" align="center"><bold>2.96</bold></td>
<td valign="top" align="center">181 &#x000D7;181</td>
<td valign="top" align="center">36</td>
<td valign="top" align="center">160</td>
</tr>
<tr>
<td valign="top" align="left">ResNet NAR &#x003D5; &#x0003D; 3</td>
<td valign="top" align="center">0.80</td>
<td valign="top" align="center">1.53</td>
<td valign="top" align="center">157 &#x000D7;157</td>
<td valign="top" align="center">30</td>
<td valign="top" align="center">134</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p><italic>Bold values are those with good quality/performance trade offs</italic>.</p>
</table-wrap-foot>
</table-wrap>
<p>An interesting configuration of ResRep occurred when &#x003F5; was set to 0.90. The computational requirements of the simplified network were 75.0% of those of the original network, and the simplified network attained an equivalent AUC level. However, when a higher simplification value was used, there was a significant drop in AUC. For the same AUC values (e.g., 0.86), NAR generated CNNs with smaller computational requirements.</p>
</sec>
<sec>
<title>3.2. NAR and IR Performance for the Inception V4 CNN</title>
<p>This set of experiments measures the performance of NAR and IR with Inception V4 (<xref ref-type="bibr" rid="B54">54</xref>). The Inception is a deeper network than ResNet50 V2 and has a higher computational cost, thus it is another interesting case for evaluating our approach. We unfortunately have not been able to use ResRep to simplify the Inception. This CNN has a more complex architecture with multiple shortcuts, and the available ResRep code/documentation does not implement Inception, nor does it provide clear directions on how to apply the method to other complex architectures (<xref ref-type="bibr" rid="B27">27</xref>).</p>
<p>The results of the NAR simplified networks as the &#x003D5; parameter is varied are shown in <xref ref-type="table" rid="T4">Table 4</xref>. First, it is noticeable that the original Inception showed a better classification performance as compared to ResNet (0.92 vs. 0.87). As compared to the IR strategy, NAR has again attained better performance for the same AUC level. Once again, for the best AUC score of each strategy and for an AUC value of 0.87, NAR requires, respectively, about 2.35 &#x000D7; and 4.93 &#x000D7; fewer FLOPs to compute an inference. These observations once again show the importance of a balanced compound network reduction as performed by NAR.</p>
<table-wrap position="float" id="T4">
<label>Table 4</label>
<caption><p>AUC, Giga-FLOPs (GFLOPs) corresponding to input sizes, number of parameter layers, and total layers of Inception V4 and of the simplified networks produced by NAR.</p></caption>
<table frame="hsides" rules="groups">
<thead><tr>
<th valign="top" align="left"><bold>CNN</bold></th>
<th valign="top" align="center"><bold>AUC</bold></th>
<th valign="top" align="center"><bold>G</bold></th>
<th valign="top" align="center"><bold>Input size</bold></th>
<th valign="top" align="center"><bold>Param. layers</bold></th>
<th valign="top" align="center"><bold>&#x00023; of layers</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Inception V4 (<xref ref-type="bibr" rid="B54">54</xref>)</td>
<td valign="top" align="center">0.92</td>
<td valign="top" align="center">15.48</td>
<td valign="top" align="center">240 &#x000D7;240</td>
<td valign="top" align="center">245</td>
<td valign="top" align="center">861</td>
</tr>
<tr>
<td valign="top" align="left">Inception IR 1</td>
<td valign="top" align="center"><bold>0.91</bold></td>
<td valign="top" align="center"><bold>9.80</bold></td>
<td valign="top" align="center">209 &#x000D7;209</td>
<td valign="top" align="center">245</td>
<td valign="top" align="center">861</td>
</tr>
<tr>
<td valign="top" align="left">Inception IR 2</td>
<td valign="top" align="center">0.89</td>
<td valign="top" align="center">6.64</td>
<td valign="top" align="center">181 &#x000D7;181</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">Inception IR 3</td>
<td valign="top" align="center">0.88</td>
<td valign="top" align="center">4.79</td>
<td valign="top" align="center">158 &#x000D7;158</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">Inception IR 4</td>
<td valign="top" align="center">0.87</td>
<td valign="top" align="center">2.76</td>
<td valign="top" align="center">137 &#x000D7;137</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">Inception IR 5</td>
<td valign="top" align="center">0.86</td>
<td valign="top" align="center">1.92</td>
<td valign="top" align="center">119 &#x000D7;119</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">Inception IR 6</td>
<td valign="top" align="center">0.77</td>
<td valign="top" align="center">1.14</td>
<td valign="top" align="center">104 &#x000D7;104</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">Inception NAR &#x003D5; &#x0003D; 1</td>
<td valign="top" align="center"><bold>0.91</bold></td>
<td valign="top" align="center"><bold>8.14</bold></td>
<td valign="top" align="center">209 &#x000D7;209</td>
<td valign="top" align="center">206</td>
<td valign="top" align="center">723</td>
</tr>
<tr>
<td valign="top" align="left">Inception NAR &#x003D5; &#x0003D; 2</td>
<td valign="top" align="center"><bold>0.92</bold></td>
<td valign="top" align="center"><bold>4.17</bold></td>
<td valign="top" align="center">181 &#x000D7;181</td>
<td valign="top" align="center">179</td>
<td valign="top" align="center">627</td>
</tr>
<tr>
<td valign="top" align="left">Inception NAR &#x003D5; &#x0003D; 3</td>
<td valign="top" align="center">0.90</td>
<td valign="top" align="center">2.21</td>
<td valign="top" align="center">158 &#x000D7;158</td>
<td valign="top" align="center">145</td>
<td valign="top" align="center">507</td>
</tr>
<tr>
<td valign="top" align="left">Inception NAR &#x003D5; &#x0003D; 4</td>
<td valign="top" align="center">0.88</td>
<td valign="top" align="center">1.02</td>
<td valign="top" align="center">137 &#x000D7;137</td>
<td valign="top" align="center">123</td>
<td valign="top" align="center">429</td>
</tr>
<tr>
<td valign="top" align="left">Inception NAR &#x003D5; &#x0003D; 5</td>
<td valign="top" align="center">0.87</td>
<td valign="top" align="center">0.56</td>
<td valign="top" align="center">119 &#x000D7;119</td>
<td valign="top" align="center">101</td>
<td valign="top" align="center">351</td>
</tr>
<tr>
<td valign="top" align="left">Inception NAR &#x003D5; &#x0003D; 6</td>
<td valign="top" align="center">0.84</td>
<td valign="top" align="center">0.28</td>
<td valign="top" align="center">104 &#x000D7;104</td>
<td valign="top" align="center">91</td>
<td valign="top" align="center">315</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p><italic>Bold values are those with good quality/performance trade offs</italic>.</p>
</table-wrap-foot>
</table-wrap>
<p>Further, the NAR simplified version had a far better trade-off in terms of the GFLOPs required to attain a certain AUC when compared to ResNet. For instance, NAR &#x003D5; &#x0003D; 5 reached an AUC of 0.87 with only 0.56 GFLOPs. A comparable performance level required at least 8.34 GFLOPs and 2.96 GFLOPs with the simplified ResNet networks, respectively, with ResRep and NAR. The results also show that the simplified Inception V4 can sustain the same AUC level as the original network with a computational cost reduction of about 4 &#x000D7; (NAR &#x003D5; &#x0003D; 2).</p>
</sec>
</sec>
<sec sec-type="discussion" id="s4">
<title>4. Discussion</title>
<p>Overall, the experimental evaluation shows that it is possible to simplify a classification CNN to reduce its computational requirements in the inference phase, while maintaining model performance comparable to the original CNN. The ResNet50 models generated by ResRep with &#x003F5; &#x0003D; 0.90 and by NAR with &#x003D5; &#x0003D; 2 practically achieved the same AUC scores as the models from the original ResNet50 V2 network and were computationally 1.32 &#x000D7; and 3.26 &#x000D7; cheaper, respectively. Our method, NAR, produced more efficient networks than ResRep. We attribute this improvement to the fact that NAR employs an approach that simplifies the multiple components of a network in a more balanced manner. The analysis of the shape structure of the simplified CNNs with both methods (shown in <xref ref-type="table" rid="T1">Tables 1</xref>, <xref ref-type="table" rid="T2">2</xref>) highlights the main differences among their simplification strategies. ResRep mainly modified the last layers of the CNNs, while NAR carried out a more homogeneous simplification over all of the network stages. Previous work (<xref ref-type="bibr" rid="B28">28</xref>, <xref ref-type="bibr" rid="B50">50</xref>, <xref ref-type="bibr" rid="B51">51</xref>) demonstrated that such a balance among the CNN components is important to maximize classification quality. The better trade-offs of NAR vs. the IR strategy also demonstrate in practice that the compound reduction performed by the former is important in maximizing AUC while reducing the FLOPs demand.</p>
<p>This observation aligns well with the goal of our NAR method, which is to modify the width, depth, and input resolution components of a network together and in a simple way. Additionally, NAR is easier to use, requiring few alterations to an original network, without the need to change the training dynamics. ResRep, on the other hand, is harder to use as it requires changing the network with extra layers and also includes new CNN training penalties etc. This is even harder with deeper CNNs that are becoming more popular.</p>
<p>In the experiments with Inception V4, which is a deeper network than ResNet50, we observed that the original Inception V4 has achieved overall better AUC than the original ResNet50, but it was about 1.6 &#x000D7; more expensive. The simplified version generated by NAR with &#x003D5; &#x0003D; 5 achieved an AUC value of 0.87, which is comparable to the original ResNet50 network, and was faster than the simplified ResNet with the same AUC value; the simplified Inception V4 model required 0.56 GFLOPs while the simplified ResNet50 model required 2.96 GFLOPs (about 5.3 &#x000D7; more expensive). Our experimental evaluation suggests that during the development of a deep learning network, it may be better to focus on the classification performance of the network and worry less about its computational requirements and further apply a network simplification step after the network architecture has been fine-tuned for classification performance.</p>
<p>In our work we used classification of TILs in whole slide images as the driving application use case. We expect that our method can be generalized to other classification problems in digital pathology. Characterization of TIL patterns in whole slide images is an important use case. Multiple studies have shown that there is a correlation between the density and spatial organization of TILs and clinical outcomes (<xref ref-type="bibr" rid="B37">37</xref>, <xref ref-type="bibr" rid="B38">38</xref>, <xref ref-type="bibr" rid="B56">56</xref>, <xref ref-type="bibr" rid="B57">57</xref>). Characterizations of TIL patterns can lead to better understanding of cancer mechanisms and improve cancer staging (<xref ref-type="bibr" rid="B58">58</xref>). There is an increasing number of computational pathology approaches to generate such characterizations (<xref ref-type="bibr" rid="B34">34</xref>, <xref ref-type="bibr" rid="B59">59</xref>, <xref ref-type="bibr" rid="B60">60</xref>).</p>
<p>Applications of deep learning methods for TIL analysis on a large number of whole slide images are desirable, as they can result in a better understanding of TIL patterns. It is important to employ effective and efficient deep learning methods in order to facilitate such applications. We have shown that our approach can reduce computational requirements by roughly 4 &#x000D7; without impacting overall classification quality for two real-world CNN networks. This is a significant improvement in execution cost and can enable a broader use of these techniques in digital pathology. We also believe this paper opens multiple interesting directions for future work. First, as briefly discussed, it would be important to evaluate a larger number of CNN architectures to analyze how simplification methods would affect their AUC and FLOP count. This could answer the question regarding whether the developer should worry or not about the FLOPs required or network complexity during the development, or if this could be resolved by simplification methods in all cases. Second, we also want to expand this analysis with additional pathology image analysis applications, including not only additional classification applications but also segmentation tasks, for instance.</p>
</sec>
<sec sec-type="data-availability" id="s5">
<title>Data Availability Statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="s6">
<title>Author Contributions</title>
<p>AM implemented the code, performed the experiments, and organized the dataset. AM, TK, and GT performed the experimental analysis. AM and GT wrote the first draft of the manuscript. AM, TK, JK, RF, JS, and GT wrote the final manuscript. All authors contributed to conception and design of the study. All authors contributed to manuscript revision, read, and approved the submitted version.</p>
</sec>
<sec sec-type="funding-information" id="s7">
<title>Funding</title>
<p>This work was supported in part by 1UG3CA225021 from the NCI, R01LM011119-01 and R01LM009239 from the NLM, CNPq, Capes/Brazil Grants PROCAD-183794, FAPEMIG, PROCAD/UFMG, K25CA181503, and U01CA242936 from National Institute of Health and generous donations from Bob Beals and Betsy Barton. This work used the Extreme Science and Engineering Discovery Environment (XSEDE), which is supported by National Science Foundation Grant Number ACI-1548562. Specifically, it used the Bridges system, which is supported by NSF award number ACI-1445606, at the Pittsburgh Supercomputing Center (PSC).</p>
</sec>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of Interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="s8">
<title>Publisher&#x00027;s Note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec> 
</body>
<back>
<sec sec-type="supplementary-material" id="s9">
<title>Supplementary Material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fmed.2022.894430/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fmed.2022.894430/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Data_Sheet_1.PDF" id="SM1" mimetype="application/pdf" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<label>1.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gurcan</surname> <given-names>MN</given-names></name> <name><surname>Boucheron</surname> <given-names>LE</given-names></name> <name><surname>Can</surname> <given-names>A</given-names></name> <name><surname>Madabhushi</surname> <given-names>A</given-names></name> <name><surname>Rajpoot</surname> <given-names>NM</given-names></name> <name><surname>Yener</surname> <given-names>B</given-names></name></person-group>. <article-title>Histopathological image analysis: a review</article-title>. <source>IEEE Rev Biomed Eng</source>. (<year>2009</year>) <volume>2</volume>:<fpage>147</fpage>&#x02013;<lpage>71</lpage>. <pub-id pub-id-type="doi">10.1109/RBME.2009.2034865</pub-id><pub-id pub-id-type="pmid">20671804</pub-id></citation></ref>
<ref id="B2">
<label>2.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>C</given-names></name> <name><surname>Xue</surname> <given-names>D</given-names></name> <name><surname>Hu</surname> <given-names>Z</given-names></name> <name><surname>Chen</surname> <given-names>H</given-names></name> <name><surname>Yao</surname> <given-names>Y</given-names></name> <name><surname>Zhang</surname> <given-names>Y</given-names></name> <etal/></person-group>. <article-title>A survey for breast histopathology image analysis using classical and deep neural networks</article-title>. In: <source>International Conference on Information Technologies in Biomedicine</source>. <publisher-loc>Springer</publisher-loc> (<year>2019</year>). p. <fpage>222</fpage>&#x02013;<lpage>33</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-030-23762-2_20</pub-id></citation>
</ref>
<ref id="B3">
<label>3.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Madabhushi</surname> <given-names>A</given-names></name> <name><surname>Lee</surname> <given-names>G</given-names></name></person-group>. <source>Image Analysis and Machine Learning in Digital Pathology: Challenges and Opportunities</source>. <publisher-loc>Stockholm</publisher-loc>: <publisher-name>Elsevier</publisher-name> (<year>2016</year>). <pub-id pub-id-type="doi">10.1016/j.media.2016.06.037</pub-id><pub-id pub-id-type="pmid">27423409</pub-id></citation></ref>
<ref id="B4">
<label>4.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Barker</surname> <given-names>J</given-names></name> <name><surname>Hoogi</surname> <given-names>A</given-names></name> <name><surname>Depeursinge</surname> <given-names>A</given-names></name> <name><surname>Rubin</surname> <given-names>DL</given-names></name></person-group>. <article-title>Automated classification of brain tumor type in whole-slide digital pathology images using local representative tiles</article-title>. <source>Med Image Anal</source>. (<year>2016</year>) <volume>30</volume>:<fpage>60</fpage>&#x02013;<lpage>71</lpage>. <pub-id pub-id-type="doi">10.1016/j.media.2015.12.002</pub-id><pub-id pub-id-type="pmid">26854941</pub-id></citation></ref>
<ref id="B5">
<label>5.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Spanhol</surname> <given-names>FA</given-names></name> <name><surname>Oliveira</surname> <given-names>LS</given-names></name> <name><surname>Petitjean</surname> <given-names>C</given-names></name></person-group>. <article-title>Breast cancer histopathological image classification using convolutional neural networks</article-title>. <source>2016 International Joint Conference on Neural Networks (IJCNN)</source>. Vancouver, BC (<year>2016</year>). <pub-id pub-id-type="doi">10.1109/IJCNN.2016.7727519</pub-id><pub-id pub-id-type="pmid">30925170</pub-id></citation></ref>
<ref id="B6">
<label>6.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Xu</surname> <given-names>Y</given-names></name> <name><surname>Xia</surname> <given-names>Z</given-names></name> <name><surname>Ai</surname> <given-names>Y</given-names></name> <name><surname>Zhang</surname> <given-names>F</given-names></name> <name><surname>Lai</surname> <given-names>M</given-names></name> <name><surname>Chang</surname> <given-names>EIC</given-names></name></person-group>. <article-title>Deep convolutional activation features for large scale brain tumor histopathology image classification and segmentation</article-title>. In: IEEE, editor. <source>2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)</source>. <publisher-loc>Brisbane, QLD</publisher-loc>: <publisher-name>IEEE</publisher-name> (<year>2015</year>). <pub-id pub-id-type="doi">10.1109/ICASSP.2015.7178109</pub-id><pub-id pub-id-type="pmid">28549410</pub-id></citation></ref>
<ref id="B7">
<label>7.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dimitriou</surname> <given-names>N</given-names></name> <name><surname>Arandjelovic</surname> <given-names>O</given-names></name> <name><surname>Caie</surname> <given-names>PD</given-names></name></person-group>. <article-title>Deep learning for whole slide image analysis: an overview</article-title>. <source>Front Med</source>. (<year>2019</year>) <volume>6</volume>:<fpage>264</fpage>. <pub-id pub-id-type="doi">10.3389/fmed.2019.00264</pub-id><pub-id pub-id-type="pmid">32974358</pub-id></citation></ref>
<ref id="B8">
<label>8.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>S</given-names></name> <name><surname>Yang</surname> <given-names>DM</given-names></name> <name><surname>Rong</surname> <given-names>R</given-names></name> <name><surname>Zhan</surname> <given-names>X</given-names></name> <name><surname>Xiao</surname> <given-names>G</given-names></name></person-group>. <article-title>Pathology image analysis using segmentation deep learning algorithms</article-title>. <source>Am J Pathol</source>. (<year>2019</year>) <volume>189</volume>:<fpage>1686</fpage>&#x02013;<lpage>98</lpage>. <pub-id pub-id-type="doi">10.1016/j.ajpath.2019.05.007</pub-id><pub-id pub-id-type="pmid">31199919</pub-id></citation></ref>
<ref id="B9">
<label>9.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Binder</surname> <given-names>T</given-names></name> <name><surname>Tantaoui</surname> <given-names>EM</given-names></name> <name><surname>Pati</surname> <given-names>P</given-names></name> <name><surname>Catena</surname> <given-names>R</given-names></name> <name><surname>Set-Aghayan</surname> <given-names>A</given-names></name> <name><surname>Gabrani</surname> <given-names>M</given-names></name></person-group>. <article-title>Multi-organ gland segmentation using deep learning</article-title>. <source>Front Med</source>. (<year>2019</year>) <volume>6</volume>:<fpage>173</fpage>. <pub-id pub-id-type="doi">10.3389/fmed.2019.00173</pub-id><pub-id pub-id-type="pmid">31428614</pub-id></citation></ref>
<ref id="B10">
<label>10.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Serag</surname> <given-names>A</given-names></name> <name><surname>Ion-Margineanu</surname> <given-names>A</given-names></name> <name><surname>Qureshi</surname> <given-names>H</given-names></name> <name><surname>McMillan</surname> <given-names>R</given-names></name> <name><surname>Saint Martin</surname> <given-names>MJ</given-names></name> <name><surname>Diamond</surname> <given-names>J</given-names></name> <etal/></person-group>. <article-title>Translational AI and deep learning in diagnostic pathology</article-title>. <source>Front Med</source>. (<year>2019</year>) <volume>6</volume>:<fpage>185</fpage>. <pub-id pub-id-type="doi">10.3389/fmed.2019.00185</pub-id><pub-id pub-id-type="pmid">31632973</pub-id></citation></ref>
<ref id="B11">
<label>11.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Grote</surname> <given-names>A</given-names></name> <name><surname>Schaadt</surname> <given-names>NS</given-names></name> <name><surname>Forestier</surname> <given-names>G</given-names></name> <name><surname>Wemmert</surname> <given-names>C</given-names></name> <name><surname>Feuerhake</surname> <given-names>F</given-names></name></person-group>. <article-title>Crowdsourcing of histological image labeling and object delineation by medical students</article-title>. <source>IEEE Trans Med Imaging</source>. (<year>2018</year>) <volume>38</volume>:<fpage>1284</fpage>&#x02013;<lpage>94</lpage>. <pub-id pub-id-type="doi">10.1109/TMI.2018.2883237</pub-id><pub-id pub-id-type="pmid">30489264</pub-id></citation></ref>
<ref id="B12">
<label>12.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>&#x000D8;rting</surname> <given-names>S</given-names></name> <name><surname>Doyle</surname> <given-names>A</given-names></name> <name><surname>van Hilten</surname> <given-names>MHA</given-names></name> <name><surname>Inel</surname> <given-names>O</given-names></name> <name><surname>Madan</surname> <given-names>CR</given-names></name> <name><surname>Mavridis</surname> <given-names>P</given-names></name> <etal/></person-group>. <article-title>A survey of crowdsourcing in medical image analysis</article-title>. <source>arXiv preprint arXiv:190209159</source>. (<year>2019</year>). <pub-id pub-id-type="doi">10.15346/hc.v7i1.1</pub-id></citation>
</ref>
<ref id="B13">
<label>13.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Singh</surname> <given-names>A</given-names></name> <name><surname>Sengupta</surname> <given-names>S</given-names></name> <name><surname>Lakshminarayanan</surname> <given-names>V</given-names></name></person-group>. <article-title>Explainable deep learning models in medical image analysis</article-title>. <source>J Imaging</source>. (<year>2020</year>) <volume>6</volume>:<fpage>52</fpage>. <pub-id pub-id-type="doi">10.3390/jimaging6060052</pub-id><pub-id pub-id-type="pmid">34460598</pub-id></citation></ref>
<ref id="B14">
<label>14.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Amann</surname> <given-names>J</given-names></name> <name><surname>Vetter</surname> <given-names>D</given-names></name> <name><surname>Blomberg</surname> <given-names>SN</given-names></name> <name><surname>Christensen</surname> <given-names>HC</given-names></name> <name><surname>Coffee</surname> <given-names>M</given-names></name> <name><surname>Gerke</surname> <given-names>S</given-names></name> <etal/></person-group>. <article-title>To explain or not to explain? Artificial intelligence explainability in clinical decision support systems</article-title>. <source>PLoS Digit Health</source>. (<year>2022</year>) <volume>1</volume>:<fpage>e0000016</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pdig.0000016</pub-id></citation>
</ref>
<ref id="B15">
<label>15.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Thompson</surname> <given-names>NC</given-names></name> <name><surname>Greenewald</surname> <given-names>KH</given-names></name> <name><surname>Lee</surname> <given-names>K</given-names></name> <name><surname>Manso</surname> <given-names>GF</given-names></name></person-group>. <article-title>The computational limits of deep learning</article-title>. <source>arXiv preprint arXiv:2007.05558</source>. (<year>2020</year>). <pub-id pub-id-type="doi">10.48550/arXiv.2007.05558</pub-id></citation>
</ref>
<ref id="B16">
<label>16.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Han</surname> <given-names>S</given-names></name> <name><surname>Mao</surname> <given-names>H</given-names></name> <name><surname>Dally</surname> <given-names>WJ</given-names></name></person-group>. <article-title>Deep compression: Compressing deep neural networks with pruning, trained quantization and Huffman coding</article-title>. <source>arXiv preprint arXiv:151000149</source>. (<year>2015</year>).</citation>
</ref>
<ref id="B17">
<label>17.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Lin</surname> <given-names>S</given-names></name> <name><surname>Ji</surname> <given-names>R</given-names></name> <name><surname>Li</surname> <given-names>Y</given-names></name> <name><surname>Wu</surname> <given-names>Y</given-names></name> <name><surname>Huang</surname> <given-names>F</given-names></name> <name><surname>Zhang</surname> <given-names>B</given-names></name></person-group>. <article-title>Accelerating convolutional networks via global &#x00026; dynamic filter pruning</article-title>. In: <source>IJCAI</source>. <publisher-loc>Stockholm</publisher-loc> (<year>2018</year>). p. 8. <pub-id pub-id-type="doi">10.24963/ijcai.2018/336</pub-id></citation>
</ref>
<ref id="B18">
<label>18.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shao</surname> <given-names>M</given-names></name> <name><surname>Dai</surname> <given-names>J</given-names></name> <name><surname>Kuang</surname> <given-names>J</given-names></name> <name><surname>Meng</surname> <given-names>D</given-names></name></person-group>. <article-title>A dynamic CNN pruning method based on matrix similarity</article-title>. <source>Signal Image Video Process</source>. (<year>2021</year>) <volume>15</volume>:<fpage>381</fpage>&#x02013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1007/s11760-020-01760-x</pub-id></citation>
</ref>
<ref id="B19">
<label>19.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Ding</surname> <given-names>X</given-names></name> <name><surname>Zhou</surname> <given-names>X</given-names></name> <name><surname>Guo</surname> <given-names>Y</given-names></name> <name><surname>Han</surname> <given-names>J</given-names></name> <name><surname>Liu</surname> <given-names>J</given-names></name> <etal/></person-group>. <article-title>Global sparse momentum SGD for pruning very deep neural networks</article-title>. In: Wallach HM, Larochelle H, Beygelzimer A, d&#x00027;Alch&#x000E9;-Buc F, Fox EB, editors. <source>Advances in Neural Information Processing Systems 32</source>. <publisher-loc>Vancouver, BC</publisher-loc>: <publisher-name>Curran Associates, Inc</publisher-name> (<year>2019</year>). p. <fpage>6382</fpage>&#x02013;<lpage>94</lpage>.</citation>
</ref>
<ref id="B20">
<label>20.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Han</surname> <given-names>S</given-names></name> <name><surname>Pool</surname> <given-names>J</given-names></name> <name><surname>Tran</surname> <given-names>J</given-names></name> <name><surname>Dally</surname> <given-names>W</given-names></name></person-group>. <article-title>Learning both weights and connections for efficient neural network</article-title>. In: Cortes C, Lawrence N, Lee D, Sugiyama M, Garnett R, editors. <source>Advances in Neural Information Processing Systems 28</source>. <publisher-loc>Montreal, QC</publisher-loc>: <publisher-name>Curran Associates, Inc</publisher-name> (<year>2015</year>). p. <fpage>1</fpage>&#x02013;<lpage>9</lpage>.</citation>
</ref>
<ref id="B21">
<label>21.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Ba</surname> <given-names>J</given-names></name> <name><surname>Caruana</surname> <given-names>R</given-names></name></person-group>. <article-title>Do deep nets really need to be deep?</article-title> In: Ghahramani Z, Welling M, Cortes C, Lawrence N, Weinberger KQ, editors. <source>Advances in Neural Information Processing Systems 27</source>. <publisher-loc>Montreal, QC</publisher-loc>: <publisher-name>Curran Associates, Inc</publisher-name> (<year>2014</year>). p. <fpage>1</fpage>&#x02013;<lpage>9</lpage>.</citation>
</ref>
<ref id="B22">
<label>22.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Banner</surname> <given-names>R</given-names></name> <name><surname>Nahshan</surname> <given-names>Y</given-names></name> <name><surname>Soudry</surname> <given-names>D</given-names></name></person-group>. <article-title>Post training 4-bit quantization of convolutional networks for rapid-deployment</article-title>. In: Wallach H, Larochelle H, Beygelzimer A, d&#x00027;Alch&#x000E9;-Buc F, Fox E, Garnett R, editors. <source>Advances in Neural Information Processing Systems 32</source>. <publisher-loc>Vancouver, BC</publisher-loc>: <publisher-name>Curran Associates, Inc</publisher-name> (<year>2019</year>). p. <fpage>1</fpage>&#x02013;<lpage>9</lpage>.</citation>
</ref>
<ref id="B23">
<label>23.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Xu</surname> <given-names>S</given-names></name> <name><surname>Huang</surname> <given-names>A</given-names></name> <name><surname>Chen</surname> <given-names>L</given-names></name> <name><surname>Zhang</surname> <given-names>B</given-names></name></person-group>. <article-title>Convolutional neural network pruning: a survey</article-title>. In: <source>2020 39th Chinese Control Conference (CCC)</source>. <publisher-loc>Shenyang</publisher-loc>: <publisher-name>IEEE</publisher-name> (<year>2020</year>). p. <fpage>7458</fpage>&#x02013;<lpage>63</lpage>. <pub-id pub-id-type="doi">10.23919/CCC50068.2020.9189610</pub-id></citation>
</ref>
<ref id="B24">
<label>24.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>He</surname> <given-names>Y</given-names></name> <name><surname>Liu</surname> <given-names>P</given-names></name> <name><surname>Wang</surname> <given-names>Z</given-names></name> <name><surname>Hu</surname> <given-names>Z</given-names></name> <name><surname>Yang</surname> <given-names>Y</given-names></name></person-group>. <article-title>Filter pruning via geometric median for deep convolutional neural networks acceleration</article-title>. In: <source>Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition</source>. Long Beach, CA (<year>2019</year>). p. <fpage>4340</fpage>&#x02013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1109/CVPR.2019.00447</pub-id></citation>
</ref>
<ref id="B25">
<label>25.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Luo</surname> <given-names>JH</given-names></name> <name><surname>Zhang</surname> <given-names>H</given-names></name> <name><surname>Zhou</surname> <given-names>HY</given-names></name> <name><surname>Xie</surname> <given-names>CW</given-names></name> <name><surname>Wu</surname> <given-names>J</given-names></name> <name><surname>Lin</surname> <given-names>W</given-names></name></person-group>. <article-title>Thinet: pruning CNN filters for a thinner net</article-title>. <source>IEEE Trans Pattern Anal Mach Intell</source>. (<year>2018</year>) <volume>41</volume>:<fpage>2525</fpage>&#x02013;<lpage>38</lpage>. <pub-id pub-id-type="doi">10.1109/TPAMI.2018.2858232</pub-id><pub-id pub-id-type="pmid">30040622</pub-id></citation></ref>
<ref id="B26">
<label>26.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lin</surname> <given-names>S</given-names></name> <name><surname>Ji</surname> <given-names>R</given-names></name> <name><surname>Yan</surname> <given-names>C</given-names></name> <name><surname>Zhang</surname> <given-names>B</given-names></name> <name><surname>Cao</surname> <given-names>L</given-names></name> <name><surname>Ye</surname> <given-names>Q</given-names></name> <etal/></person-group>. <article-title>Towards optimal structured cnn pruning via generative adversarial learning</article-title>. In: <source>Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition</source>. (<year>2019</year>). p. <fpage>2790</fpage>&#x02013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1109/CVPR.2019.00290</pub-id></citation>
</ref>
<ref id="B27">
<label>27.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Ding</surname> <given-names>X</given-names></name> <name><surname>Hao</surname> <given-names>T</given-names></name> <name><surname>Tan</surname> <given-names>J</given-names></name> <name><surname>Liu</surname> <given-names>J</given-names></name> <name><surname>Han</surname> <given-names>J</given-names></name> <name><surname>Guo</surname> <given-names>Y</given-names></name> <etal/></person-group>. <article-title>ResRep: lossless CNN pruning via decoupling remembering and forgetting</article-title>. In: <source>Proceedings of the IEEE/CVF International Conference on Computer Vision</source>. <publisher-loc>Montreal, QC</publisher-loc> (<year>2021</year>). p. <fpage>4510</fpage>&#x02013;<lpage>20</lpage>. <pub-id pub-id-type="doi">10.1109/ICCV48922.2021.00447</pub-id></citation>
</ref>
<ref id="B28">
<label>28.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Lu</surname> <given-names>Z</given-names></name> <name><surname>Pu</surname> <given-names>H</given-names></name> <name><surname>Wang</surname> <given-names>F</given-names></name> <name><surname>Hu</surname> <given-names>Z</given-names></name> <name><surname>Wang</surname> <given-names>L</given-names></name></person-group>. <article-title>The expressive power of neural networks: a view from the width</article-title>. In: <source>Proceedings of the 31st International Conference on Neural Information Processing Systems. NIPS&#x00027;17</source>. <publisher-loc>Red Hook, NY</publisher-loc>: <publisher-name>Curran Associates Inc</publisher-name>. (<year>2017</year>). p. <fpage>6232</fpage>&#x02013;<lpage>40</lpage>.</citation>
</ref>
<ref id="B29">
<label>29.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zou</surname> <given-names>J</given-names></name> <name><surname>Rui</surname> <given-names>T</given-names></name> <name><surname>Zhou</surname> <given-names>Y</given-names></name> <name><surname>Yang</surname> <given-names>C</given-names></name> <name><surname>Zhang</surname> <given-names>S</given-names></name></person-group>. <article-title>Convolutional neural network simplification via feature map pruning</article-title>. <source>Comput Electric Eng</source>. (<year>2018</year>) <volume>70</volume>:<fpage>950</fpage>&#x02013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1016/j.compeleceng.2018.01.036</pub-id></citation>
</ref>
<ref id="B30">
<label>30.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Hajabdollahi</surname> <given-names>M</given-names></name> <name><surname>Esfandiarpoor</surname> <given-names>R</given-names></name> <name><surname>Najarian</surname> <given-names>K</given-names></name> <name><surname>Karimi</surname> <given-names>N</given-names></name> <name><surname>Samavi</surname> <given-names>S</given-names></name> <name><surname>Soroushmehr</surname> <given-names>SR</given-names></name></person-group>. <article-title>Hierarchical pruning for simplification of convolutional neural networks in diabetic retinopathy classification</article-title>. In: <source>2019 41st Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC)</source>. <publisher-loc>Berlin</publisher-loc>: <publisher-name>IEEE</publisher-name> (<year>2019</year>). p. <fpage>970</fpage>&#x02013;<lpage>3</lpage>. <pub-id pub-id-type="doi">10.1109/EMBC.2019.8857769</pub-id><pub-id pub-id-type="pmid">31946055</pub-id></citation></ref>
<ref id="B31">
<label>31.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cheng</surname> <given-names>Y</given-names></name> <name><surname>Wang</surname> <given-names>D</given-names></name> <name><surname>Zhou</surname> <given-names>P</given-names></name> <name><surname>Zhang</surname> <given-names>T</given-names></name></person-group>. <article-title>A survey of model compression and acceleration for deep neural networks</article-title>. <source>arXiv preprint arXiv:171009282</source>. (<year>2017</year>). <pub-id pub-id-type="doi">10.48550/arXiv.1710.09282</pub-id></citation>
</ref>
<ref id="B32">
<label>32.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Ding</surname> <given-names>X</given-names></name> <name><surname>Ding</surname> <given-names>G</given-names></name> <name><surname>Guo</surname> <given-names>Y</given-names></name> <name><surname>Han</surname> <given-names>J</given-names></name> <name><surname>Yan</surname> <given-names>C</given-names></name></person-group>. <article-title>Approximated oracle filter pruning for destructive CNN width optimization</article-title>. In: <source>International Conference on Machine Learning</source>. <publisher-loc>PMLR</publisher-loc> (<year>2019</year>). p. <fpage>1607</fpage>&#x02013;<lpage>16</lpage>.</citation>
</ref>
<ref id="B33">
<label>33.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Osaku</surname> <given-names>D</given-names></name> <name><surname>Gomes</surname> <given-names>J</given-names></name> <name><surname>Falc&#x000E3;o</surname> <given-names>AX</given-names></name></person-group>. <article-title>Convolutional neural network simplification with progressive retraining</article-title>. <source>arXiv preprint arXiv:210104699</source>. (<year>2021</year>). <pub-id pub-id-type="doi">10.1016/j.patrec.2021.06.032</pub-id></citation>
</ref>
<ref id="B34">
<label>34.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Saltz</surname> <given-names>J</given-names></name> <name><surname>Gupta</surname> <given-names>R</given-names></name> <name><surname>Hou</surname> <given-names>L</given-names></name> <name><surname>Kurc</surname> <given-names>T</given-names></name> <name><surname>Singh</surname> <given-names>P</given-names></name> <name><surname>Nguyen</surname> <given-names>V</given-names></name> <etal/></person-group>. <article-title>Spatial organization and molecular correlation of tumor-infiltrating lymphocytes using deep learning on pathology images</article-title>. <source>Cell Rep</source>. (<year>2018</year>) <volume>23</volume>:<fpage>181</fpage>. <pub-id pub-id-type="doi">10.1016/j.celrep.2018.03.086</pub-id><pub-id pub-id-type="pmid">29617659</pub-id></citation></ref>
<ref id="B35">
<label>35.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Le</surname> <given-names>H</given-names></name> <name><surname>Gupta</surname> <given-names>RR</given-names></name> <name><surname>Hou</surname> <given-names>L</given-names></name> <name><surname>Abousamra</surname> <given-names>S</given-names></name> <name><surname>Fassler</surname> <given-names>D</given-names></name> <name><surname>Kurc</surname> <given-names>TM</given-names></name> <etal/></person-group>. <article-title>Utilizing automated breast cancer detection to identify spatial distributions of tumor infiltrating lymphocytes in invasive breast cancer</article-title>. <source>Am J Pathol</source>. (<year>2020</year>) <volume>190</volume>:<fpage>1491</fpage>&#x02013;<lpage>504</lpage>. <pub-id pub-id-type="doi">10.1016/j.ajpath.2020.03.012</pub-id><pub-id pub-id-type="pmid">32277893</pub-id></citation></ref>
<ref id="B36">
<label>36.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Oble</surname> <given-names>DA</given-names></name> <name><surname>Loewe</surname> <given-names>R</given-names></name> <name><surname>Yu</surname> <given-names>P</given-names></name> <name><surname>Mihm</surname> <given-names>MC</given-names></name></person-group>. <article-title>Focus on TILs: prognostic significance of tumor infiltrating lymphocytes in human melanoma</article-title>. <source>Cancer Immunity Arch</source>. (<year>2009</year>) <volume>9</volume>:<fpage>3</fpage>.<pub-id pub-id-type="pmid">19338264</pub-id></citation></ref>
<ref id="B37">
<label>37.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Angell</surname> <given-names>H</given-names></name> <name><surname>Galon</surname> <given-names>J</given-names></name></person-group>. <article-title>From the immune contexture to the Immunoscore: the role of prognostic and predictive immune markers in cancer</article-title>. <source>Curr Opin Immunol</source>. (<year>2013</year>) <volume>25</volume>:<fpage>261</fpage>&#x02013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.1016/j.coi.2013.03.004</pub-id><pub-id pub-id-type="pmid">23579076</pub-id></citation></ref>
<ref id="B38">
<label>38.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mlecnik</surname> <given-names>B</given-names></name> <name><surname>Bindea</surname> <given-names>G</given-names></name> <name><surname>Pag&#x000E9;s</surname> <given-names>F</given-names></name> <name><surname>Galon</surname> <given-names>J</given-names></name></person-group>. <article-title>Tumor immunosurveillance in human cancers</article-title>. <source>Cancer Metastasis Rev</source>. (<year>2011</year>) <volume>30</volume>:<fpage>5</fpage>&#x02013;<lpage>12</lpage>. <pub-id pub-id-type="doi">10.1007/s10555-011-9270-7</pub-id><pub-id pub-id-type="pmid">21249426</pub-id></citation></ref>
<ref id="B39">
<label>39.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Teng</surname> <given-names>MW</given-names></name> <name><surname>Ngiow</surname> <given-names>SF</given-names></name> <name><surname>Ribas</surname> <given-names>A</given-names></name> <name><surname>Smyth</surname> <given-names>MJ</given-names></name></person-group>. <article-title>Classifying cancers based on T-cell infiltration and PD-L1</article-title>. <source>Cancer Res</source>. (<year>2015</year>) <volume>75</volume>:<fpage>2139</fpage>&#x02013;<lpage>45</lpage>. <pub-id pub-id-type="doi">10.1158/0008-5472.CAN-15-0255</pub-id><pub-id pub-id-type="pmid">25977340</pub-id></citation></ref>
<ref id="B40">
<label>40.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rakaee</surname> <given-names>M</given-names></name> <name><surname>Kilvaer</surname> <given-names>TK</given-names></name> <name><surname>Dalen</surname> <given-names>SM</given-names></name> <name><surname>Richardsen</surname> <given-names>E</given-names></name> <name><surname>Paulsen</surname> <given-names>EE</given-names></name> <name><surname>Hald</surname> <given-names>SM</given-names></name> <etal/></person-group>. <article-title>Evaluation of tumor-infiltrating lymphocytes using routine H&#x00026;E slides predicts patient survival in resected non-small cell lung cancer</article-title>. <source>Hum Pathol</source>. (<year>2018</year>) <volume>79</volume>:<fpage>188</fpage>&#x02013;<lpage>98</lpage>. <pub-id pub-id-type="doi">10.1016/j.humpath.2018.05.017</pub-id><pub-id pub-id-type="pmid">29885403</pub-id></citation></ref>
<ref id="B41">
<label>41.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>S</given-names></name> <name><surname>Yang</surname> <given-names>DM</given-names></name> <name><surname>Rong</surname> <given-names>R</given-names></name> <name><surname>Zhan</surname> <given-names>X</given-names></name> <name><surname>Fujimoto</surname> <given-names>J</given-names></name> <name><surname>Liu</surname> <given-names>H</given-names></name> <etal/></person-group>. <article-title>Artificial intelligence in lung cancer pathology image analysis</article-title>. <source>Cancers</source>. (<year>2019</year>) <volume>11</volume>:<fpage>1673</fpage>. <pub-id pub-id-type="doi">10.3390/cancers11111673</pub-id><pub-id pub-id-type="pmid">31661863</pub-id></citation></ref>
<ref id="B42">
<label>42.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Komura</surname> <given-names>D</given-names></name> <name><surname>Ishikawa</surname> <given-names>S</given-names></name></person-group>. <article-title>Machine learning methods for histopathological image analysis</article-title>. <source>Comput Struct Biotechnol J</source>. (<year>2018</year>) <volume>16</volume>:<fpage>34</fpage>&#x02013;<lpage>42</lpage>. <pub-id pub-id-type="doi">10.1016/j.csbj.2018.01.001</pub-id><pub-id pub-id-type="pmid">30275936</pub-id></citation></ref>
<ref id="B43">
<label>43.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Linder</surname> <given-names>N</given-names></name> <name><surname>Taylor</surname> <given-names>JC</given-names></name> <name><surname>Colling</surname> <given-names>R</given-names></name> <name><surname>Pell</surname> <given-names>R</given-names></name> <name><surname>Alveyn</surname> <given-names>E</given-names></name> <name><surname>Joseph</surname> <given-names>J</given-names></name> <etal/></person-group>. <article-title>Deep learning for detecting tumour-infiltrating lymphocytes in testicular germ cell tumours</article-title>. <source>J Clin Pathol</source>. (<year>2019</year>) <volume>72</volume>:<fpage>157</fpage>&#x02013;<lpage>64</lpage>. <pub-id pub-id-type="doi">10.1136/jclinpath-2018-205328</pub-id><pub-id pub-id-type="pmid">30518631</pub-id></citation></ref>
<ref id="B44">
<label>44.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Goyal</surname> <given-names>M</given-names></name> <name><surname>Knackstedt</surname> <given-names>T</given-names></name> <name><surname>Yan</surname> <given-names>S</given-names></name> <name><surname>Hassanpour</surname> <given-names>S</given-names></name></person-group>. <article-title>Artificial intelligence-based image classification methods for diagnosis of skin cancer: challenges and opportunities</article-title>. <source>Comput Biol Med</source>. (<year>2020</year>) <volume>127</volume>:<fpage>104065</fpage>. <pub-id pub-id-type="doi">10.1016/j.compbiomed.2020.104065</pub-id><pub-id pub-id-type="pmid">33246265</pub-id></citation></ref>
<ref id="B45">
<label>45.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>J</given-names></name> <name><surname>Li</surname> <given-names>W</given-names></name> <name><surname>Sisk</surname> <given-names>A</given-names></name> <name><surname>Ye</surname> <given-names>H</given-names></name> <name><surname>Wallace</surname> <given-names>WD</given-names></name> <name><surname>Speier</surname> <given-names>W</given-names></name> <etal/></person-group>. <article-title>A multi-resolution model for histopathology image classification and localization with multiple instance learning</article-title>. <source>Comput Biol Med</source>. (<year>2021</year>) <volume>131</volume>:<fpage>104253</fpage>. <pub-id pub-id-type="doi">10.1016/j.compbiomed.2021.104253</pub-id><pub-id pub-id-type="pmid">33601084</pub-id></citation></ref>
<ref id="B46">
<label>46.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>George</surname> <given-names>K</given-names></name> <name><surname>Faziludeen</surname> <given-names>S</given-names></name> <name><surname>Sankaran</surname> <given-names>P</given-names></name> <name><surname>Joseph K</surname> <given-names>P</given-names></name></person-group>. <article-title>Breast cancer detection from biopsy images using nucleus guided transfer learning and belief based fusion</article-title>. <source>Comput Biol Med</source>. (<year>2020</year>) <volume>124</volume>:<fpage>103954</fpage>. <pub-id pub-id-type="doi">10.1016/j.compbiomed.2020.103954</pub-id><pub-id pub-id-type="pmid">32777599</pub-id></citation></ref>
<ref id="B47">
<label>47.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Klauschen</surname> <given-names>F</given-names></name> <name><surname>M&#x000FC;ller</surname> <given-names>KR</given-names></name> <name><surname>Binder</surname> <given-names>A</given-names></name> <name><surname>Bockmayr</surname> <given-names>M</given-names></name> <name><surname>H&#x000E4;gele</surname> <given-names>M</given-names></name> <name><surname>Seegerer</surname> <given-names>P</given-names></name> <etal/></person-group>. <article-title>Scoring of tumor-infiltrating lymphocytes: From visual estimation to machine learning</article-title>. In: <source>Seminars in Cancer Biology</source>. vol. 52. Elsevier (<year>2018</year>). p. <fpage>151</fpage>&#x02013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.1016/j.semcancer.2018.07.001</pub-id><pub-id pub-id-type="pmid">29990622</pub-id></citation></ref>
<ref id="B48">
<label>48.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Garcia</surname> <given-names>E</given-names></name> <name><surname>Hermoza</surname> <given-names>R</given-names></name> <name><surname>Castanon</surname> <given-names>CB</given-names></name> <name><surname>Cano</surname> <given-names>L</given-names></name> <name><surname>Castillo</surname> <given-names>M</given-names></name> <name><surname>Castanneda</surname> <given-names>C</given-names></name></person-group>. <article-title>Automatic lymphocyte detection on gastric cancer IHC images using deep learning</article-title>. In: <source>2017 IEEE 30th International Symposium on Computer-Based Medical Systems (CBMS)</source>. <publisher-loc>IEEE</publisher-loc> (<year>2017</year>). p. <fpage>200</fpage>&#x02013;<lpage>4</lpage>. <pub-id pub-id-type="doi">10.1109/CBMS.2017.94</pub-id></citation>
</ref>
<ref id="B49">
<label>49.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Roy</surname> <given-names>K</given-names></name> <name><surname>Banik</surname> <given-names>D</given-names></name> <name><surname>Bhattacharjee</surname> <given-names>D</given-names></name> <name><surname>Nasipuri</surname> <given-names>M</given-names></name></person-group>. <article-title>Patch-based system for classification of breast histology images using deep learning</article-title>. <source>Comput Med Imaging Graph</source>. (<year>2019</year>) <volume>71</volume>:<fpage>90</fpage>&#x02013;<lpage>3</lpage>. <pub-id pub-id-type="doi">10.1016/j.compmedimag.2018.11.003</pub-id><pub-id pub-id-type="pmid">30594745</pub-id></citation></ref>
<ref id="B50">
<label>50.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Zagoruyko</surname> <given-names>S</given-names></name> <name><surname>Komodakis</surname> <given-names>N</given-names></name></person-group>. <article-title>Wide residual networks</article-title>. In: Wilson RC, Hancock ER, Smith WAP, editors. <source>Proceedings of the British Machine Vision Conference (BMVC)</source>. <publisher-loc>York</publisher-loc>: <publisher-name>BMVA Press</publisher-name> (<year>2016</year>). p. 87.1&#x02013;87.12. <pub-id pub-id-type="doi">10.5244/C.30.87</pub-id></citation>
</ref>
<ref id="B51">
<label>51.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Raghu</surname> <given-names>M</given-names></name> <name><surname>Poole</surname> <given-names>B</given-names></name> <name><surname>Kleinberg</surname> <given-names>J</given-names></name> <name><surname>Ganguli</surname> <given-names>S</given-names></name> <name><surname>Sohl-Dickstein</surname> <given-names>J</given-names></name></person-group>. <article-title>On the expressive power of deep neural networks</article-title>. In: <source>Proceedings of the 34th International Conference on Machine Learning</source> - <italic>Volume 70. ICML&#x00027;17</italic>. (<year>2017</year>). p. <fpage>2847</fpage>&#x02013;<lpage>54</lpage>.</citation>
</ref>
<ref id="B52">
<label>52.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Tan</surname> <given-names>M</given-names></name> <name><surname>Le</surname> <given-names>QV</given-names></name></person-group>. <article-title>EfficientNet: rethinking model scaling for convolutional neural networks</article-title>. In: <source>Proceedings of the 36th International Conference on Machine Learning (ICML)</source>. <publisher-loc>Long Beach, CA</publisher-loc> (<year>2019</year>).</citation>
</ref>
<ref id="B53">
<label>53.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>He</surname> <given-names>K</given-names></name> <name><surname>Zhang</surname> <given-names>X</given-names></name> <name><surname>Ren</surname> <given-names>S</given-names></name> <name><surname>Sun</surname> <given-names>J</given-names></name></person-group>. <article-title>Identity mappings in deep residual networks</article-title>. In: <source>European Conference on Computer Vision</source>. <publisher-loc>Amsterdam</publisher-loc>: <publisher-name>Springer</publisher-name> (<year>2016</year>). p. <fpage>630</fpage>&#x02013;<lpage>45</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-319-46493-0_38</pub-id><pub-id pub-id-type="pmid">30441492</pub-id></citation></ref>
<ref id="B54">
<label>54.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Szegedy</surname> <given-names>C</given-names></name> <name><surname>Ioffe</surname> <given-names>S</given-names></name> <name><surname>Vanhoucke</surname> <given-names>V</given-names></name> <name><surname>Alemi</surname> <given-names>AA</given-names></name></person-group>. <article-title>Inception-v4, Inception-ResNet and the impact of residual connections on learning</article-title>. In: <source>Thirty-First AAAI Conference on Artificial Intelligence</source>. <publisher-loc>San Francisco, CA</publisher-loc> (<year>2017</year>).</citation>
</ref>
<ref id="B55">
<label>55.</label>
<citation citation-type="web"><person-group person-group-type="author"><collab>National Human Genome Research Institute. The Cancer Genome Atlas.</collab></person-group> (<year>2017</year>). Available online at: <ext-link ext-link-type="uri" xlink:href="https://cancergenome.nih.gov/">https://cancergenome.nih.gov/</ext-link></citation>
</ref>
<ref id="B56">
<label>56.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zitvogel</surname> <given-names>L</given-names></name> <name><surname>Tesniere</surname> <given-names>A</given-names></name> <name><surname>Kroemer</surname> <given-names>G</given-names></name></person-group>. <article-title>Cancer despite immunosurveillance: immunoselection and immunosubversion</article-title>. <source>Nat Rev Immunol</source>. (<year>2006</year>) <volume>6</volume>:<fpage>715</fpage>&#x02013;<lpage>27</lpage>. <pub-id pub-id-type="doi">10.1038/nri1936</pub-id><pub-id pub-id-type="pmid">16977338</pub-id></citation></ref>
<ref id="B57">
<label>57.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fridman</surname> <given-names>WH</given-names></name> <name><surname>Pages</surname> <given-names>F</given-names></name> <name><surname>Saut&#x000E9;s-Fridman</surname> <given-names>C</given-names></name> <name><surname>Galon</surname> <given-names>J</given-names></name></person-group>. <article-title>The immune contexture in human tumours: impact on clinical outcome</article-title>. <source>Nat Rev Cancer</source>. (<year>2012</year>) <volume>12</volume>:<fpage>298</fpage>&#x02013;<lpage>306</lpage>. <pub-id pub-id-type="doi">10.1038/nrc3245</pub-id><pub-id pub-id-type="pmid">22419253</pub-id></citation></ref>
<ref id="B58">
<label>58.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Broussard</surname> <given-names>EK</given-names></name> <name><surname>Disis</surname> <given-names>ML</given-names></name></person-group>. <article-title>TNM staging in colorectal cancer: T is for T cell and M is for memory</article-title>. <source>J Clin Oncol</source>. (<year>2011</year>) <volume>29</volume>:<fpage>601</fpage>&#x02013;<lpage>3</lpage>. <pub-id pub-id-type="doi">10.1200/JCO.2010.32.9078</pub-id><pub-id pub-id-type="pmid">21245434</pub-id></citation></ref>
<ref id="B59">
<label>59.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rutledge</surname> <given-names>WC</given-names></name> <name><surname>Kong</surname> <given-names>J</given-names></name> <name><surname>Gao</surname> <given-names>J</given-names></name> <name><surname>Gutman</surname> <given-names>DA</given-names></name> <name><surname>Cooper</surname> <given-names>LA</given-names></name> <name><surname>Appin</surname> <given-names>C</given-names></name> <etal/></person-group>. <article-title>Tumor-infiltrating lymphocytes in glioblastoma are associated with specific genomic alterations and related to transcriptional class</article-title>. <source>Clin Cancer Res</source>. (<year>2013</year>) <volume>19</volume>:<fpage>4951</fpage>&#x02013;<lpage>60</lpage>. <pub-id pub-id-type="doi">10.1158/1078-0432.CCR-13-0551</pub-id><pub-id pub-id-type="pmid">23864165</pub-id></citation></ref>
<ref id="B60">
<label>60.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lazar</surname> <given-names>AJ</given-names></name> <name><surname>McLellan</surname> <given-names>MD</given-names></name> <name><surname>Bailey</surname> <given-names>MH</given-names></name> <name><surname>Miller</surname> <given-names>CA</given-names></name> <name><surname>Appelbaum</surname> <given-names>EL</given-names></name> <name><surname>Cordes</surname> <given-names>MG</given-names></name> <etal/></person-group>. <article-title>Comprehensive and integrated genomic characterization of adult soft tissue sarcomas</article-title>. <source>Cell</source>. (<year>2017</year>) <volume>171</volume>:<fpage>950</fpage>&#x02013;<lpage>65</lpage>.<pub-id pub-id-type="pmid">29100075</pub-id></citation></ref>
</ref-list> 
</back>
</article> 