<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xml:lang="EN" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Public Health</journal-id>
<journal-title>Frontiers in Public Health</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Public Health</abbrev-journal-title>
<issn pub-type="epub">2296-2565</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpubh.2022.769692</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Public Health</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>A Neural Network and Optimization Based Lung Cancer Detection System in CT Images</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Venkatesh</surname> <given-names>Chapala</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Ramana</surname> <given-names>Kadiyala</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c003"><sup>&#x0002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1464826/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Lakkisetty</surname> <given-names>Siva Yamini</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Band</surname> <given-names>Shahab S.</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Agarwal</surname> <given-names>Shweta</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Mosavi</surname> <given-names>Amir</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
<xref ref-type="aff" rid="aff7"><sup>7</sup></xref>
<xref ref-type="corresp" rid="c002"><sup>&#x0002A;</sup></xref>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Department of ECE, Annamacharya Institute of Technology and Sciences</institution>, <addr-line>Rajampet</addr-line>, <country>India</country></aff>
<aff id="aff2"><sup>2</sup><institution>Department of IT, Chaitanya Bharathi Institute of Technology</institution>, <addr-line>Hyderabad</addr-line>, <country>India</country></aff>
<aff id="aff3"><sup>3</sup><institution>Future Technology Research Center, College of Future, National Yunlin University of Science and Technology</institution>, <addr-line>Douliou</addr-line>, <country>Taiwan</country></aff>
<aff id="aff4"><sup>4</sup><institution>SAGE University</institution>, <addr-line>Indore</addr-line>, <country>India</country></aff>
<aff id="aff5"><sup>5</sup><institution>John von Neumann Faculty of Informatics, Obuda University</institution>, <addr-line>Budapest</addr-line>, <country>Hungary</country></aff>
<aff id="aff6"><sup>6</sup><institution>Faculty of Civil Engineering, TU-Dresden</institution>, <addr-line>Dresden</addr-line>, <country>Germany</country></aff>
<aff id="aff7"><sup>7</sup><institution>Institute of Information Engineering, Automation and Mathematics, Slovak University of Technology in Bratislava</institution>, <addr-line>Bratislava</addr-line>, <country>Slovakia</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Celestine Iwendi, School of Creative Technologies University of Bolton, United Kingdom</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: G Surya Narayana, Vardhaman College of Engineering, India; Chennareddy Vijay Simha Reddy, Middlesex University, United Kingdom; Venkata Subbaraju Dommaraju, University of the Cumberlands, United States</p></fn>
<corresp id="c001">&#x0002A;Correspondence: Shahab S. Band <email>shamshirbands&#x00040;yuntech.edu.tw</email></corresp>
<corresp id="c002">Amir Mosavi <email>amir.mosavi&#x00040;kvk.uni-obuda.hu</email></corresp>
<corresp id="c003">Kadiyala Ramana <email>ramana.it01&#x00040;gmail.com</email></corresp>
<fn fn-type="other" id="fn001"><p>This article was submitted to Digital Public Health, a section of the journal Frontiers in Public Health</p></fn></author-notes>
<pub-date pub-type="epub">
<day>07</day>
<month>06</month>
<year>2022</year>
</pub-date>
<pub-date pub-type="collection">
<year>2022</year>
</pub-date>
<volume>10</volume>
<elocation-id>769692</elocation-id>
<history>
<date date-type="received">
<day>02</day>
<month>09</month>
<year>2021</year>
</date>
<date date-type="accepted">
<day>20</day>
<month>01</month>
<year>2022</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x000A9; 2022 Venkatesh, Ramana, Lakkisetty, Band, Agarwal and Mosavi.</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Venkatesh, Ramana, Lakkisetty, Band, Agarwal and Mosavi</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license> </permissions>
<abstract>
<p>One of the most common causes of death from cancer for both women and men is lung cancer. Lung nodules are critical for the screening of cancer and early recognition permits treatment and enhances the rate of rehabilitation in patients. Although a lot of work is being done in this area, an increase in accuracy is still required to swell patient persistence rate. However, traditional systems do not segment cancer cells of different forms accurately and no system attained greater reliability. An effective screening procedure is proposed in this work to not only identify lung cancer lesions rapidly but to increase accuracy. In this procedure, Otsu thresholding segmentation is utilized to accomplish perfect isolation of the selected area, and the cuckoo search algorithm is utilized to define the best characteristics for partitioning cancer nodules. By using a local binary pattern, the relevant features of the lesion are retrieved. The CNN classifier is designed to spot whether a lung lesion is malicious or non-malicious based on the retrieved features. The proposed framework achieves an accuracy of 96.97% percent. The recommended study reveals that accuracy is improved, and the results are compiled using Particle swarm optimization and genetic algorithms.</p></abstract>
<kwd-group>
<kwd>cancer</kwd>
<kwd>lung cancer</kwd>
<kwd>machine learning</kwd>
<kwd>artificial intelligence</kwd>
<kwd>deep learning</kwd>
<kwd>cancer detection</kwd>
</kwd-group>
<counts>
<fig-count count="8"/>
<table-count count="3"/>
<equation-count count="12"/>
<ref-count count="39"/>
<page-count count="9"/>
<word-count count="4895"/>
</counts>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="s1">
<title>Introduction</title>
<p>The most well-known reason for death because of malignant growth is lung cancer. The second most habitually analyzed type of malignancy is lung cancer. Pneumonic nodules are apparent in the lung to evaluate metastases from different malignancies (<xref ref-type="bibr" rid="B1">1</xref>, <xref ref-type="bibr" rid="B2">2</xref>). Computed tomography (CT) is the most significant image mode for assessing progress/crumbling and for observation and decision-making malignant lung growths. As a result of the precocious presentation of lung malignancy by CT, doctors can suggest more productive treatments (<xref ref-type="bibr" rid="B3">3</xref>, <xref ref-type="bibr" rid="B4">4</xref>). Guess and recuperating components for scattered sickness with precise malignancy stages are required for orderly and consoling treatment (<xref ref-type="bibr" rid="B5">5</xref>). The early conclusion of the period of lung malignancy is firmly connected to the patient&#x00027;s continuance rate (<xref ref-type="bibr" rid="B6">6</xref>). In clinical terms, the disease is known to be strange hyperplasia and significantly beyond what 200 sorts can influence the individuals (<xref ref-type="bibr" rid="B7">7</xref>). According to the ACS (American Cancer Society), lung malignancy is the main cause of death in both men and women in the United States. Approximately 228,820 new lung malignancy cases were estimated, with 135,720 deaths (<xref ref-type="bibr" rid="B8">8</xref>). It causes a larger number of deaths than other malignant tumors. Early recognition of tumorous lung nodules is the key factor for patient survival rate. When contrasted with chest X-ray imaging, CT perceives the tumorous nodules consistently at an underlying stage (<xref ref-type="bibr" rid="B9">9</xref>). Practically all radiologists use CT by exploring multiple pictures from a solitary patient. Thus, the exhaustion of the radiologists can prompt wrong analysis. 
Hence, the exact physical valuation measure is tedious and colossally inconsistent (<xref ref-type="bibr" rid="B10">10</xref>). A precise segment is noteworthy for the right valuation of nodule improvement and for the arrangement of malignant nodules (disease cells) from benign ones (non-disease cells). The reason for this work is to exactly recognize the nodules over the CT lung pictures.</p>
<p>The proposed instructional method pulls back, utilizing a median filtering method to reduce confusion based on the CT image. Second, a crossover division approach is utilized to isolate the lung zone from its environmental factors. The proposed division strategy utilizes the Otsu thresholding to eliminate superfluous groups, consequently isolating the specific lung locales, and nodules of interest can be decisively characterized by cuckoo inquiry advancement. Third, the assortment of surface highlights for the particular nodule is undisturbed by parallel neighborhood examples at the feature stage. Finally, the highlights of the sectioned lung nodules are prepared by the CNN classifier to distinguish the lesions as malignant or non-malevolent.</p></sec>
<sec id="s2">
<title>Related Work</title>
<p>In 2019, Ananya et al. (<xref ref-type="bibr" rid="B11">11</xref>) developed a multi-approach system for lung cancer categorization using genetics. They assessed false negatives and true positives for classification accuracy in this study, but not detection accuracy.</p>
<p>In 2019, Venkatesh et al. (<xref ref-type="bibr" rid="B12">12</xref>) developed an innovative approach to detect lesions based on a GA and LBP. This process achieves an accuracy of 90%.</p>
<p>In 2019, Preeti et al. (<xref ref-type="bibr" rid="B13">13</xref>) introduced a lung cancer detection framework based on the fuzzy c-mean clustering and SVM classifier techniques.</p>
<p>In 2019, Senthil Kumar et al. (<xref ref-type="bibr" rid="B14">14</xref>) introduced an approach for detecting lung lesions using GCPSO. In this work, multiple optimization techniques are used to classify cancer in CT images. The process obtained a precision of 95%.</p>
<p>In 2018, Perumal et al. (<xref ref-type="bibr" rid="B15">15</xref>) proposed an ABC algorithm for malignancy recognition and classification. This guidance attained a 92% accuracy.</p>
<p>In 2017, Ammar et al. (<xref ref-type="bibr" rid="B16">16</xref>) established an early diagnostic architecture for genetically altered tumor detection. In this study, the authors achieved an accuracy rate of 84%.</p>
<p>In 2017, Kamil et al. (<xref ref-type="bibr" rid="B17">17</xref>) introduced a DWT-based lung lesion detection system. In this method, by using subtraction and erosion techniques, images are analyzed to remove the cancer region. This approach yielded an accuracy of 89%.</p>
<p>In 2016, Mukesh et al. (<xref ref-type="bibr" rid="B18">18</xref>) introduced a DWT-based method for assessing a high volume of tissues in chest X-ray images. Using this method, the authors were able to achieve an accuracy of 86%.</p>
<p>In 2014, Santos et al. (<xref ref-type="bibr" rid="B19">19</xref>) described an area development and Hessian matrix to identify minor respiratory lesions. The presented approach achieves a classification accuracy of 88.4%.</p>
<p>In 2014, Jinsa and Gunavathri (<xref ref-type="bibr" rid="B20">20</xref>) reported an ANN-based lesion categorization technique. They were able to classify with an accuracy of 93.3%.</p>
<p>The principal gap has indeed been extended due to the lack of research publications that demonstrate computations. Because of poor directionality, slower processing, greater calculation time, and complex computations, the methods suggested by the authors mentioned above are less effective in all cases. As a result, an interactive technique for identifying lung cancer in CT images is suggested in this article, which uses the Otsu threshold-based Cuckoo search algorithm, Local Binary Pattern for image retrieval, and CNN for classification to conquer all of the shortfalls of the existing methods. By selecting the most cost-effective strategy, optimization algorithms tend to produce a solution for image processing processes.</p></sec>
<sec id="s3">
<title>Motivation and Contribution</title>
<p>Lung cancer is confirmed by physicians after a thorough examination of CT scans, which requires a lot of time and is not always accurate. To create imagery as precise, operational, and efficient as possible, state-of-the-art optimization techniques and image processing approaches were required. The proposed technology will aid doctors in accurately identifying lung nodules at an early stage, as well as studying the internal anatomy. As a part of the contribution, some glitches related to lung cancer detection are discussed here. The region of interest is retrieved using Otsu thresholding and cuckoo search optimization, which is a novel approach to segmentation. This proposed partitioning approach requires only a few parameters to precisely separate nodules of varied sizes and shapes.</p>
<sec>
<title>Proposed Methodology</title>
<p><xref ref-type="fig" rid="F1">Figure 1</xref> depicts the prospective lung malignancy diagnostic procedure, which comprises five phases: (1) contrast enhancement and noise reduction through pre-processing, (2) Otsu thresholding based cuckoo search algorithm to segment the lesion from its backgrounds, (3) retrieval of regions of concern, (4) retrieval of descriptors from segmented lung lesions, and, in the last phase, (5) CNN has been used to assess if the lesion was abnormal or normal. The next sections provide detailed descriptions of the above-mentioned phases.</p>
<fig id="F1" position="float">
<label>Figure 1</label>
<caption><p>Architecture of proposed method.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpubh-10-769692-g0001.tif"/>
</fig></sec>
<sec>
<title>Image Acquisition</title>
<p>It is the basic step before proceeding with other critical steps. It is a method for processing a digital image from a database (<xref ref-type="bibr" rid="B21">21</xref>). Numerous sorts of scanners, such as X-Ray, MRI, and CT, are used to obtain the images. The CT image was captured using a CT scanner. It is a type of scanning that creates cross-section scans for each pixel (<xref ref-type="bibr" rid="B22">22</xref>).</p></sec>
<sec>
<title>Pre-processing</title>
<p>Pre-processing is a procedure to improve image details. The basic idea is to suppress noise, which corrects undesired distortions and enhances the associated attributes of the image for subsequent processing (<xref ref-type="bibr" rid="B23">23</xref>). Because all techniques are sensitive to noise, efficiently pre-processed images allow for better segmentation and, as a result, better classification. The size of the pixel area could be used to classify pre-processing procedures. Image enhancement employs these techniques. Enhancement operations operate on the image pixels of the neighborhood and the corresponding values of the neighborhood, and contribute quality to the images by decreasing noise and distortion (<xref ref-type="bibr" rid="B24">24</xref>).</p>
<disp-formula id="E1"><mml:math id="M1"><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>f</mml:mi></mml:mstyle><mml:mrow><mml:mo>(</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>x</mml:mi></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>=</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>a</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>0</mml:mn></mml:mstyle></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>+</mml:mo></mml:mstyle><mml:mstyle displaystyle='true'><mml:munderover><mml:mo>&#x02211;</mml:mo><mml:mrow><mml:mstyle mathvariant='bold-italic'><mml:mi>n</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mstyle></mml:mrow><mml:mi>&#x0221E;</mml:mi></mml:munderover><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>a</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold-italic'><mml:mi>n</mml:mi></mml:mstyle></mml:mrow></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>s</mml:mi></mml:mstyle><mml:mfrac><mml:mrow><mml:mstyle mathvariant='bold-italic'><mml:mi>n</mml:mi></mml:mstyle><mml:mtext>&#x003C0;</mml:mtext><mml:mstyle mathvariant='bold-italic'><mml:mi>x</mml:mi></mml:mstyle></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>L</mml:mi></mml:mstyle></mml:mfrac><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>+</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>b</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold-italic'><mml:mi>n</mml:mi></mml:mstyle></mml:mrow></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi></mml:mstyle><mml:mfrac><mml:mrow><mml:mstyle 
mathvariant='bold-italic'><mml:mi>n</mml:mi></mml:mstyle><mml:mtext>&#x003C0;</mml:mtext><mml:mstyle mathvariant='bold-italic'><mml:mi>x</mml:mi></mml:mstyle></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>L</mml:mi></mml:mstyle></mml:mfrac></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mstyle></mml:math></disp-formula>
<sec>
<title>Median Filtering</title>
<p>Salt and pepper noise can be found on CT scans. The finest features are obscured by these impacts. By keeping the frontier of the image as fine as possible, this filtering lowers salt and pepper noise (<xref ref-type="bibr" rid="B12">12</xref>). This filter gathers information from a sample within a non-averaged window (<xref ref-type="bibr" rid="B25">25</xref>). The edges of the filter are better managed than those of other linear filters. The following equations are used to get the median value.</p>
<disp-formula id="E2"><label>(1)</label><mml:math id="M2"><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>M</mml:mi></mml:mstyle><mml:mrow><mml:mo>(</mml:mo><mml:mstyle mathvariant='bold-italic' mathsize='normal'><mml:mi>g</mml:mi></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>=</mml:mo></mml:mstyle><mml:mstyle displaystyle='true'><mml:munderover><mml:mo>&#x02211;</mml:mo><mml:mrow><mml:mstyle mathvariant='bold-italic' mathsize='normal'><mml:mi>k</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mstyle></mml:mrow><mml:mstyle mathvariant='bold-italic' mathsize='normal'><mml:mi>n</mml:mi></mml:mstyle></mml:munderover><mml:mrow><mml:mrow><mml:mo>|</mml:mo><mml:mrow><mml:msub><mml:mstyle mathvariant='bold-italic' mathsize='normal'><mml:mi>x</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold-italic' mathsize='normal'><mml:mi>k</mml:mi></mml:mstyle></mml:msub><mml:mo>&#x02212;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi></mml:mstyle></mml:mrow><mml:mo>|</mml:mo></mml:mrow></mml:mrow></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>.</mml:mo></mml:mstyle></mml:math></disp-formula>
<disp-formula id="E3"><label>(2)</label><mml:math id="M3"><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi><mml:mo>=</mml:mo><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mi>d</mml:mi><mml:mi>i</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi></mml:mstyle><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>x</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>,</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>x</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>,</mml:mo><mml:mo>&#x02026;</mml:mo><mml:mo>&#x02026;</mml:mo><mml:mo>&#x02026;</mml:mo></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>.</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>x</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>n</mml:mi></mml:mstyle></mml:msub></mml:mrow><mml:mo>}</mml:mo></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>.</mml:mo></mml:mstyle></mml:math></disp-formula>
</sec></sec>
<sec>
<title>Otsu Thresholding Segmentation With Optimization</title>
<sec>
<title>Otsu Thresholding</title>
<p>The goal of this strategy is to scour specified like-classes of pixels in a picture for the closeness of neighboring pixels in order to generate a concentrated image object. Separating the background and sub-regions in medical imaging is tough (<xref ref-type="bibr" rid="B26">26</xref>). The Otsu segmentation algorithm works better to &#x0201C;recognize&#x0201D; or &#x0201C;smear&#x0201D; the context contents of the front objects. It is an adaptive threshold binarization procedure proposed by OTSU in 1979. This procedure uses the highest between-class variance between the context and the target based on the rule of threshold assortment (<xref ref-type="bibr" rid="B27">27</xref>). It segments the image into the forefront and the contextual based on the characteristics of gray level values. If the finest threshold is attained, the gap between the two regions is the highest. The Otsu algorithm, in general, utilizes the greatest between-class variance. The larger the variance value, the wider the difference between the two areas, since variance is a useful determinant of uniform gray distribution. If some areas are wrongly segmented into contextual or if some contextual is segmented into areas, then the gap is too small between the two areas. As a result, if the variance among groups is higher, the likelihood of incorrect classification is lowered, resulting in cohesive segmentation.</p>
<p>The following is the main principle of OTSU-based threshold segmentation:</p>
<p>Let us call the gray values <italic>g</italic> and the number of pixels <italic>n</italic><sub><italic>x</italic></sub>. Then</p>
<disp-formula id="E4"><label>(3)</label><mml:math id="M4"><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi><mml:mo>=</mml:mo></mml:mstyle><mml:mstyle displaystyle='true'><mml:msubsup><mml:mo>&#x02211;</mml:mo><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>x</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mstyle></mml:mrow><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>L</mml:mi></mml:mstyle><mml:mo>&#x02212;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle></mml:mrow></mml:msubsup><mml:mrow><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>n</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>x</mml:mi></mml:mstyle></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>=</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>n</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>0</mml:mn></mml:mstyle></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>+</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>n</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>+</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>n</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>+</mml:mo><mml:mo>&#x02026;</mml:mo><mml:mo>+</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>n</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>L</mml:mi></mml:mstyle><mml:mo>&#x02212;</mml:mo><mml:mstyle mathvariant='bold' 
mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle></mml:mrow></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>&#x02026;</mml:mo></mml:mstyle></mml:mrow></mml:mstyle></mml:math></disp-formula>
<p>where g = 0, 1,..., L-1, and P indicate the number of pixels. Suppose C1 and C2 are the two kinds of pixels. C1 pixels have a range of [0, x] while C2 pixels have a range of [x&#x0002B;1, L&#x02212;1].</p>
<disp-formula id="E5"><label>(4)</label><mml:math id="M5"><mml:msubsup><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>&#x003C3;</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold-italic'><mml:mi>G</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold-italic'><mml:mi>v</mml:mi></mml:mstyle></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msubsup><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>=</mml:mo></mml:mstyle><mml:mstyle displaystyle='true'><mml:msubsup><mml:mo>&#x02211;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>L</mml:mi><mml:mo>-</mml:mo><mml:mn>1</mml:mn></mml:mstyle></mml:mrow></mml:msubsup><mml:mrow><mml:msup><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo stretchy='false'>(</mml:mo><mml:mi>g</mml:mi></mml:mstyle><mml:mo>&#x02212;</mml:mo><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>m</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold-italic'><mml:mi>G</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold-italic'><mml:mi>v</mml:mi></mml:mstyle></mml:mrow></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo stretchy='false'>)</mml:mo></mml:mstyle></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msup></mml:mrow></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>.</mml:mo></mml:mstyle></mml:math></disp-formula>
<disp-formula id="E6"><label>(5)</label><mml:math id="M6"><mml:msubsup><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>&#x003C3;</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>B</mml:mi><mml:mi>v</mml:mi></mml:mstyle></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msubsup><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>=</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle></mml:msub><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>m</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle></mml:msub><mml:mo>&#x02212;</mml:mo><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>m</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>G</mml:mi><mml:mi>v</mml:mi></mml:mstyle></mml:mrow></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msup><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>+</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msub><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>m</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msub><mml:mo>&#x02212;</mml:mo><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>m</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold' 
mathsize='normal'><mml:mi>G</mml:mi><mml:mi>v</mml:mi></mml:mstyle></mml:mrow></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msup></mml:math></disp-formula>
<p>The below-mentioned calculations are used to compute the mean intensities.</p>
<disp-formula id="E7"><label>(6)</label><mml:math id="M7"><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>m</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>=</mml:mo></mml:mstyle><mml:mfrac><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi></mml:mstyle></mml:mfrac><mml:mstyle displaystyle='true'><mml:msubsup><mml:mo>&#x02211;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>x</mml:mi></mml:mstyle></mml:msubsup><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>.</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi></mml:mstyle></mml:msub></mml:mrow></mml:mstyle></mml:math></disp-formula>
<disp-formula id="E8"><label>(7)</label><mml:math id="M8"><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>m</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>=</mml:mo></mml:mstyle><mml:mfrac><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi></mml:mstyle></mml:mfrac><mml:mstyle displaystyle='true'><mml:msubsup><mml:mo>&#x02211;</mml:mo><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi><mml:mo>=</mml:mo></mml:mstyle><mml:mstyle mathvariant='bold-italic'><mml:mi>x</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mstyle></mml:mrow><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>L</mml:mi></mml:mstyle><mml:mo>&#x02212;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle></mml:mrow></mml:msubsup><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>.</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi></mml:mstyle></mml:msub></mml:mrow></mml:mstyle></mml:math></disp-formula>
<disp-formula id="E9"><label>(8)</label><mml:math id="M9"><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>m</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold-italic'><mml:mi>G</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold-italic'><mml:mi>v</mml:mi></mml:mstyle></mml:mrow></mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>=</mml:mo></mml:mstyle><mml:mstyle displaystyle='true'><mml:msubsup><mml:mo>&#x02211;</mml:mo><mml:mrow><mml:mstyle mathvariant='bold-italic'><mml:mi>g</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mstyle></mml:mrow><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>L</mml:mi></mml:mstyle><mml:mo>&#x02212;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle></mml:mrow></mml:msubsup><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mo>.</mml:mo></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>g</mml:mi></mml:mstyle></mml:msub></mml:mrow></mml:mstyle></mml:math></disp-formula>
<p>where m<sub>1</sub> and m<sub>2</sub> are the C1 and C2 pixel average intensities, and m<sub>Gv</sub> is the global mean intensity. Lastly, the ratio &#x003C4;, which is provided below, is used to determine the ideal threshold.</p>
<disp-formula id="E10"><label>(9)</label><mml:math id="M10"><mml:mstyle mathvariant='bold-italic'><mml:mi>&#x003C4;</mml:mi><mml:mo>=</mml:mo></mml:mstyle><mml:mfrac><mml:mrow><mml:msubsup><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>&#x003C3;</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>B</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold-italic'><mml:mi>v</mml:mi></mml:mstyle></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msubsup></mml:mrow><mml:mrow><mml:msubsup><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>&#x003C3;</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold-italic'><mml:mi>G</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold-italic'><mml:mi>v</mml:mi></mml:mstyle></mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle></mml:msubsup></mml:mrow></mml:mfrac></mml:math></disp-formula>
</sec>
<sec>
<title>Cuckoo Search Optimization</title>
<p>The cuckoo generation function is projected using this approach, which reduces the implications. A large number of nests are accessible during the search procedure. The location of the cuckoo egg has been discovered as a novel solution (<xref ref-type="bibr" rid="B28">28</xref>). The steps in the search procedure are as follows. A cuckoo bird places one egg at a time in a randomly chosen nest. The parasite nests were static, and the number of eggs in the nests would increase until they reach their highest level. When the cuckoo&#x00027;s egg is spotted, the host bird seems to have the choice of chucking the egg away or scrapping the nest and forming a new one.</p>
<p>The Levy flight theory has improved the CS algorithm (<xref ref-type="bibr" rid="B29">29</xref>). This CS technique is used to calculate the appropriate threshold for eliminating the lung nodule.</p>
<p>The following analogy is incorporated into the proposed technique for optimum selection (<xref ref-type="table" rid="T3">Algorithm 1</xref>):</p>
<table-wrap position="float" id="T3">
<label>Algorithm 1</label>
<caption><p>Cuckoo search algorithm.</p></caption>
<table frame="hsides" rules="groups">
<tbody><tr>
<td valign="top" align="left">Step1:</td>
<td valign="top" align="left">Initialization parameters: n, Pa, &#x00026; M where n=number of host nests; pa : probability of discovery of alien, M: maximum number of iterations</td>
</tr>
<tr>
<td valign="top" align="left">Step2:</td>
<td valign="top" align="left">Generate initial n host, n_i<sup>t</sup></td>
</tr>
<tr>
<td valign="top" align="left">Step3:</td>
<td valign="top" align="left">Evaluate f(n_i<sup>t</sup>)</td>
</tr>
<tr>
<td valign="top" align="left">Step4:</td>
<td valign="top" align="left">Generate a new solution <inline-formula><mml:math id="M11"><mml:msubsup><mml:mrow><mml:mi>n</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:msubsup><mml:mrow><mml:mi>n</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msubsup><mml:mo>&#x0002B;</mml:mo><mml:mi>&#x003B1;</mml:mi><mml:mo>&#x02295;</mml:mo><mml:mi>L</mml:mi><mml:mi>e</mml:mi><mml:msup><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mtext>&#x00020;</mml:mtext></mml:mrow><mml:mrow><mml:mi>&#x02032;</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:msup><mml:mi>y</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>&#x003B3;</mml:mi><mml:mtext>&#x000A0;</mml:mtext></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
</tr>
<tr>
<td valign="top" align="left" colspan="2">Where the symbol &#x02295; is entry-wise multiplication, &#x003B1;&#x0003E;0 indicates the step size, Levy(&#x003B3;)= <italic>g</italic><sup>&#x02212;&#x003B3;</sup>(1 &#x0003C; &#x003B3; &#x02264; 3)</td>
</tr>
<tr>
<td valign="top" align="left">Step5:</td>
<td valign="top" align="left">Evaluate <italic>f</italic><inline-formula><mml:math id="M12"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msubsup><mml:mrow><mml:mi>n</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mtext>&#x000A0;</mml:mtext><mml:mn>1</mml:mn></mml:mrow></mml:msubsup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
</tr>
<tr>
<td valign="top" align="left">Step6:</td>
<td valign="top" align="left">Choose a nest <italic>n</italic><sub><italic>j</italic></sub> randomly</td>
</tr>
<tr>
<td valign="top" align="left">Step7:</td>
<td valign="top" align="left">If <inline-formula><mml:math id="M13"><mml:mo stretchy='false'>(</mml:mo><mml:msubsup><mml:mi>n</mml:mi><mml:mi>j</mml:mi><mml:mi>t</mml:mi></mml:msubsup><mml:mo stretchy='false'>)</mml:mo><mml:mo>&#x0003E;</mml:mo><mml:mo stretchy='false'>(</mml:mo><mml:msubsup><mml:mi>n</mml:mi><mml:mi>j</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo stretchy='false'>)</mml:mo></mml:math></inline-formula> then Replace <inline-formula><mml:math id="M15"><mml:msubsup><mml:mrow><mml:mi>n</mml:mi></mml:mrow><mml:mrow><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msubsup><mml:mtext>&#x000A0;</mml:mtext><mml:mi>w</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:msubsup><mml:mrow><mml:mi>n</mml:mi></mml:mrow><mml:mrow><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mtext>&#x000A0;</mml:mtext><mml:mn>1</mml:mn></mml:mrow></mml:msubsup></mml:math></inline-formula></td>
</tr>
<tr>
<td valign="top" align="left">Step8:</td>
<td valign="top" align="left">Confiscate a worse nest with Pa</td>
</tr>
<tr>
<td valign="top" align="left">Step9:</td>
<td valign="top" align="left">Construct new nest using Levy flights</td>
</tr>
<tr>
<td valign="top" align="left">Step10:</td>
<td valign="top" align="left">Retain the best solutions</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The new solution, which depicts the subcategory of thresholds, is depicted by the egg of a cuckoo. This is also utilized for segmentation of the lung nodule. The grade of eggs for each host nest is either 0 or 1, which mimics the segmentation procedure&#x00027;s threshold partition. Pa is the probability that a cuckoo&#x00027;s egg will be discovered by the host bird. Pa has a predefined threshold. It demonstrates the principle of removing the least relevant threshold subgroups and, as a result, removing these threshold values from further analysis.</p></sec></sec>
<sec>
<title>Feature Extraction</title>
<p>The LBP operator was established to determine texture in the first place (<xref ref-type="bibr" rid="B30">30</xref>). By thresholding an image with the central pixel value and taking the result as a binary quantity, the operator applies a mark to each pixel (<xref ref-type="bibr" rid="B31">31</xref>). The picture of the Lung CT could be considered as a micro-pattern structure that the LBP operator can well portray. The steps for extracting the characteristics are outlined below.</p>
<list list-type="bullet">
<list-item><p>Divide the examined window into cells.</p></list-item>
<list-item><p>In a cell, collate each pixel with its neighbors.</p></list-item>
<list-item><p>If the value of the neighbor pixel is greater than or equal to the value of the center pixel, assign &#x0201C;1&#x0201D;; otherwise, assign &#x0201C;0&#x0201D;.</p></list-item>
<list-item><p>A binary number is created by comparing all of the pixels.</p></list-item>
<list-item><p>Lastly, over the cell, compute the histogram. The LBP value can be calculated using the expression <inline-formula><mml:math id="M16"><mml:mi>L</mml:mi><mml:mi>B</mml:mi><mml:msubsup><mml:mrow><mml:mi>P</mml:mi></mml:mrow><mml:mrow><mml:mi>X</mml:mi><mml:mo>,</mml:mo><mml:mi>Y</mml:mi><mml:mtext>&#x000A0;&#x000A0;&#x000A0;</mml:mtext></mml:mrow><mml:mrow><mml:mi>U</mml:mi></mml:mrow></mml:msubsup></mml:math></inline-formula>where U represents uniform pattern and X,Y indicate neighborhood.</p></list-item>
</list>
<disp-formula id="E11"><label>(10)</label><mml:math id="M17"><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>L</mml:mi><mml:mi>B</mml:mi></mml:mstyle><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>X</mml:mi><mml:mo>,</mml:mo><mml:mi>Y</mml:mi><mml:mo>=</mml:mo></mml:mstyle><mml:mstyle displaystyle='true'><mml:msubsup><mml:mo>&#x02211;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>0</mml:mn></mml:mstyle><mml:mrow><mml:mstyle mathvariant='bold-italic'><mml:mi>m</mml:mi></mml:mstyle><mml:mo>&#x02212;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>1</mml:mn></mml:mstyle></mml:mrow></mml:msubsup><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>s</mml:mi></mml:mstyle><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>s</mml:mi></mml:mstyle></mml:msub><mml:mo>&#x02212;</mml:mo><mml:msub><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>P</mml:mi></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>c</mml:mi></mml:mstyle></mml:msub><mml:mtext>&#x000A0;</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:msup><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>2</mml:mn></mml:mstyle><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>s</mml:mi></mml:mstyle></mml:msup></mml:mrow></mml:mstyle></mml:mrow></mml:msub></mml:math></disp-formula>
<disp-formula id="E12"><label>(11)</label><mml:math id="M18"><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>s</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo>=</mml:mo></mml:mstyle><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable columnalign='left'><mml:mtr columnalign='left'><mml:mtd columnalign='left'><mml:mn>1</mml:mn></mml:mtd><mml:mtd columnalign='left'><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>t</mml:mi></mml:mstyle><mml:mo>&#x02265;</mml:mo><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mn>0</mml:mn></mml:mstyle></mml:mrow></mml:mtd></mml:mtr><mml:mtr columnalign='left'><mml:mtd columnalign='left'><mml:mn>0</mml:mn></mml:mtd><mml:mtd columnalign='left'><mml:mrow><mml:mstyle mathvariant='bold' mathsize='normal'><mml:mi>t</mml:mi><mml:mo>&#x0003C;</mml:mo><mml:mn>0</mml:mn></mml:mstyle></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow></mml:mrow></mml:math></disp-formula>
<p>P<sub>c</sub> is the gray value of the center pixel, P<sub>s</sub> is the intensity value of the neighboring pixels (s = 0, 1, &#x02026;, m&#x02212;1), and m is the number of image elements within radius R, where R is greater than zero (R &#x0003E; 0), creating a regionally oriented neighborhood set. After identifying each pixel in a picture, a histogram is created to define the texture image (<xref ref-type="bibr" rid="B32">32</xref>, <xref ref-type="bibr" rid="B33">33</xref>).</p>
<sec id="s4">
<title>Classification</title>
<p>CNN belongs to the DNN group, which is comprised of numerous hidden layers, like RELU, fully linked, pooling and convolution layer, etc. CNN shares weights in the convolutional layer, which lowers the network latency and enhances network performance (<xref ref-type="bibr" rid="B34">34</xref>). CNN&#x00027;s prominent features are shared weights, local connectivity, and three-dimensional arrangements of neurons. A feature map is created with a kernel by a convolution layer of diverse sub-regions of the input image (<xref ref-type="bibr" rid="B35">35</xref>). Then, a nonlinear function is added to the RELU layer to improve the convergence properties when the error is small. The architecture of CNN is as shown in <xref ref-type="fig" rid="F2">Figure 2</xref>.</p>
<fig id="F2" position="float">
<label>Figure 2</label>
<caption><p>Architecture of CNN.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpubh-10-769692-g0002.tif"/>
</fig>
<p>In addition to convolutional layers, CNNs quite often incorporate pooling layers (<xref ref-type="bibr" rid="B36">36</xref>). They are principally used for lessening the dimensions of the tensor and speeding up estimations (<xref ref-type="bibr" rid="B37">37</xref>). All such layers are simple. So, in the pooling layer, the image is split up into smaller portions, and for each portion the maximum value is selected (<xref ref-type="bibr" rid="B38">38</xref>). After being portioned, it is placed in the output in the respective position. RELU, a rectified linear unit, is a form of hidden layer. The activation function is most popularly used in neural networks, predominantly in CNNs (<xref ref-type="bibr" rid="B34">34</xref>).</p>
<sec id="s5">
<title>Simulation Results</title>
<p>Lung cancer is diagnosed in CT medical images using the novel cuckoo search algorithm and attributes are determined in this study. Lung cancer CT images were collected from a private hospital (Satyam diagnostic center, Anantapur). The adaptive threshold issue in this study is referred to as an optimization problem and it can be resolved using the CSA approach. In this study, the outcomes of the suggested method were compared to those of the PSO and GA algorithms. This work has been carried out by MATLAB software. When compared to open-source tools, MATLAB has a great affinity with deep learning techniques as well as hardware tools. Also, open-source tools have a hard time bringing all of the libraries together in one spot.</p>
<p>This study relies on our prior work (<xref ref-type="bibr" rid="B39">39</xref>), in which the outcomes were produced using a Genetic algorithm and particle swarm optimization approaches in addition to LBP and CNN. Cuckoo search optimization is used in this work, together with CNN and LBP, to enhance the accuracy. The input and median filter output of CT lung cancer pictures are depicted in <xref ref-type="fig" rid="F3">Figure 3</xref>. Low-frequency noise and distortion are common in CT scan images. To reduce noise and distortion, the input image is processed with a median filter.</p>
<fig id="F3" position="float">
<label>Figure 3</label>
<caption><p><bold>(A)</bold> Input CT image. <bold>(B)</bold> Filtered output.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpubh-10-769692-g0003.tif"/>
</fig>
<p>To identify lung lesions in the CT image, it is split into multiple clusters and then optimized using the Otsu thresholding approach. The CT scan is first split using simple Otsu thresholding, which maximizes the between-class variance of the segmented classes. The result of the thresholding technique can be enhanced by processing it with cuckoo search optimization. Following partitioning, the image is subjected to LBP feature extraction, which extracts the textural features before extracting the detected output (see <xref ref-type="fig" rid="F4">Figure 4</xref>).</p>
<fig id="F4" position="float">
<label>Figure 4</label>
<caption><p><bold>(A)</bold> Extracted output. <bold>(B)</bold> Segmented output.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpubh-10-769692-g0004.tif"/>
</fig>
<p>After image retrieval, the image is given to CNN classification, which assesses the image as normal or abnormal by showing a message such as &#x0201C;Tumor is MALIGNANT&#x0201D; or &#x0201C;Tumor is BENIGN,&#x0201D; as illustrated in <xref ref-type="fig" rid="F5">Figure 5</xref>. The system&#x00027;s general function is created in a GUI, as seen in <xref ref-type="fig" rid="F6">Figure 6</xref>. The statistical results like performance metrics obtained by 200 iterations for the input image are shown in graphical representation in <xref ref-type="fig" rid="F7">Figure 7</xref>.</p>
<fig id="F5" position="float">
<label>Figure 5</label>
<caption><p>Classification output.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpubh-10-769692-g0005.tif"/>
</fig>
<fig id="F6" position="float">
<label>Figure 6</label>
<caption><p>GUI output.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpubh-10-769692-g0006.tif"/>
</fig>
<fig id="F7" position="float">
<label>Figure 7</label>
<caption><p>Statistical results graphical representation.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpubh-10-769692-g0007.tif"/>
</fig>
<p><xref ref-type="table" rid="T1">Table 1</xref> shows that the suggested approach yields a higher accuracy of 97%, sensitivity of 97.8%, specificity of 92.6%, and PSNR of 45.38%, and a lower MSE of 0.013 than conventional systems. These optimum results are obtained for 200 iterations (our earlier proposed systems).</p>
<table-wrap position="float" id="T1">
<label>Table 1</label>
<caption><p>Attributes obtained from the proposed method.</p></caption>
<table frame="hsides" rules="groups">
<thead><tr>
<th valign="top" align="left"><bold>Parameters</bold></th>
<th valign="top" align="left"><bold>Proposed Method (CSO&#x0002B;CNN&#x0002B;LBP)</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">MSE</td>
<td valign="top" align="left">0.013</td>
</tr>
<tr>
<td valign="top" align="left">PSNR (%)</td>
<td valign="top" align="left">45.38</td>
</tr>
<tr>
<td valign="top" align="left">Specificity (%)</td>
<td valign="top" align="left">92.672</td>
</tr>
<tr>
<td valign="top" align="left">Sensitivity (%)</td>
<td valign="top" align="left">97.806</td>
</tr>
<tr>
<td valign="top" align="left">Accuracy (%)</td>
<td valign="top" align="left">96.979</td>
</tr>
</tbody>
</table>
</table-wrap>
<p><xref ref-type="table" rid="T2">Table 2</xref> shows that the suggested approach has higher accuracy (97%) than conventional systems (Our earlier proposed systems). The Comparative Results Graphical Representation is shown in <xref ref-type="fig" rid="F8">Figure 8</xref>.</p>
<table-wrap position="float" id="T2">
<label>Table 2</label>
<caption><p>Comparative Results with proposed method.</p></caption>
<table frame="hsides" rules="groups">
<thead><tr>
<th valign="top" align="left"><bold>Parameters</bold></th>
<th valign="top" align="left"><bold>Proposed Method (CSO&#x0002B;CNN&#x0002B;LBP)</bold></th>
<th valign="top" align="left"><bold>(PSO&#x0002B;SVM&#x0002B;LBP)</bold></th>
<th valign="top" align="left"><bold>(GA&#x0002B;SVM<bold>&#x0002B;</bold>LBP)</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">MSE</td>
<td valign="top" align="left">0.013</td>
<td valign="top" align="left">0.0301</td>
<td valign="top" align="left">0.0651</td>
</tr>
<tr>
<td valign="top" align="left">PSNR</td>
<td valign="top" align="left">45.38</td>
<td valign="top" align="left">33.2788</td>
<td valign="top" align="left">27.5311</td>
</tr>
<tr>
<td valign="top" align="left">Specificity (%)</td>
<td valign="top" align="left">92.672</td>
<td valign="top" align="left">60.0000</td>
<td valign="top" align="left">90.4950</td>
</tr>
<tr>
<td valign="top" align="left">Sensitivity (%)</td>
<td valign="top" align="left">97.806</td>
<td valign="top" align="left">96.5783</td>
<td valign="top" align="left">83.7143</td>
</tr>
<tr>
<td valign="top" align="left">Accuracy (%)</td>
<td valign="top" align="left">96.979</td>
<td valign="top" align="left">96.9391</td>
<td valign="top" align="left">90.4937</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="F8" position="float">
<label>Figure 8</label>
<caption><p>Comparative results graphical representation.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpubh-10-769692-g0008.tif"/>
</fig></sec>
<sec sec-type="conclusions" id="s6">
<title>Conclusion</title>
<p>In this article, a strong approach for recognizing lung cancer in CT images is developed. For exact cancer diagnosis in CT lung images, the Otsu thresholding-based cuckoo search optimization and CNN classifier approach were used. Based on the simulation findings, it is observed that the suggested method reliably segments CT images and detects lesions of various forms and sizes. Moreover, the proposed approach comprises successive stages that consecutively yield the final detection result. The techniques that are utilized in the different stages are basic and simple to implement.</p>
<p>Based on the simulation findings, the accuracy of the proposed framework is calculated to be 96.97%, which is greater than any other demonstrative framework found in the literature. As for future work, a powerful strategy could be created by supplanting the CNN Classifier with a profound deep learning method and CAD tools. One may improve the structure for images of lung cancer in different modalities.</p>
<sec sec-type="data-availability" id="s7">
<title>Data Availability Statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p></sec>
<sec id="s8">
<title>Author Contributions</title>
<p>CV: conceptualization, methodology, software, and visualization. KR: data curation, writing&#x02014;original draft, data analysis, and investigation. SL: software, validation, and editing. SB: software, validation, and editing. AM: supervision, writing&#x02014;review &#x00026; editing. SA: software, validation, and editing. All authors contributed to the article and approved the submitted version.</p></sec>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of Interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p></sec>
<sec sec-type="disclaimer" id="s9">
<title>Publisher&#x00027;s Note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p></sec> </body>
<back>
<ack><p>The authors express sincere thanks to Dr. M. Vijay Kumar, Radiologist from Star Diagnostics Hospital at Ananthapuramu, for providing the lung CT images to carry-out this research work.</p>
</ack>
<ref-list>
<title>References</title>
<ref id="B1">
<label>1.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Manickavasagam</surname> <given-names>R</given-names></name> <name><surname>Selvan</surname> <given-names>S</given-names></name></person-group>. <article-title>Automatic detection and classification of lung nodules in CT image using optimized neuro fuzzy classifier with cuckoo search algorithm</article-title>. <source>J Med Syst.</source> (<year>2019</year>) <volume>43</volume>:<fpage>1</fpage>&#x02013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1007/s10916-019-1177-9</pub-id><pub-id pub-id-type="pmid">30758682</pub-id></citation></ref>
<ref id="B2">
<label>2.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Brown</surname> <given-names>MS</given-names></name> <name><surname>McNitt-Gray</surname> <given-names>MF</given-names></name></person-group>. <article-title>Patient-specific models for lung nodule detection and surveillance in CT images</article-title>. <source>Proc IEEE Trans Med Imag</source>. (<year>2001</year>) <volume>20</volume>:<fpage>1242</fpage>&#x02013;<lpage>50</lpage>. <pub-id pub-id-type="doi">10.1109/42.974919</pub-id><pub-id pub-id-type="pmid">11811824</pub-id></citation></ref>
<ref id="B3">
<label>3.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cosman</surname> <given-names>PC</given-names></name> <name><surname>Tseng</surname> <given-names>C</given-names></name> <name><surname>Gray</surname> <given-names>RM</given-names></name></person-group>. <article-title>Tree-structured vector quantization of CT chest scans: image quality and diagnostic accuracy</article-title>. <source>Proc IEEE Trans Med Imag</source>. (<year>1993</year>) <volume>12</volume>:<fpage>727</fpage>&#x02013;<lpage>39</lpage>.<pub-id pub-id-type="pmid">18218468</pub-id></citation></ref>
<ref id="B4">
<label>4.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dewes</surname> <given-names>P</given-names></name> <name><surname>Frellesen</surname> <given-names>C</given-names></name> <name><surname>Al-Butmeh</surname> <given-names>F</given-names></name> <name><surname>Albrecht</surname> <given-names>MH</given-names></name> <name><surname>Scholtz</surname> <given-names>J-E</given-names></name> <name><surname>Metzger</surname> <given-names>SC</given-names></name> <etal/></person-group>. <article-title>Comparative evaluation of non-contrast CAIPIRINHA-VIBE 3T-MRI and multidetector CT for detection of pulmonary nodules: in vivo evaluation of diagnostic accuracy and image quality</article-title>. <source>Eur J Radiol</source>. (<year>2016</year>) <volume>85</volume>:<fpage>193</fpage>&#x02013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1016/j.ejrad.2015.11.020</pub-id><pub-id pub-id-type="pmid">26724665</pub-id></citation></ref>
<ref id="B5">
<label>5.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Georg Homann</surname> <given-names>MD</given-names></name> <name><surname>Mustaf</surname> <given-names>DF</given-names></name></person-group>. <article-title>Improved detection of bone metastases from lung cancer in the thoracic cage using 5- and 1-mm axial images versus a new CT software generating rib unfolding images</article-title>. <source>J Acad Radiol</source>. (<year>2015</year>) <volume>22</volume>:<fpage>505</fpage>&#x02013;<lpage>12</lpage>. <pub-id pub-id-type="doi">10.1016/j.acra.2014.12.005</pub-id><pub-id pub-id-type="pmid">25586709</pub-id></citation></ref>
<ref id="B6">
<label>6.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>De Nunzio</surname> <given-names>G</given-names></name> <name><surname>Massafra</surname> <given-names>A</given-names></name></person-group>. <article-title>Approaches to juxta-pleural nodule detection in CT images within the MAGIC-5 collaboration</article-title>. <source>J Nucl Instrum Methods Phys Res</source>. (<year>2011</year>) <volume>648</volume>:<fpage>103</fpage>&#x02013;<lpage>6</lpage>. <pub-id pub-id-type="doi">10.1016/j.nima.2010.12.082</pub-id></citation>
</ref>
<ref id="B7">
<label>7.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dhanamjayulu</surname> <given-names>C</given-names></name> <name><surname>Nizhal</surname> <given-names>UN</given-names></name> <name><surname>Maddikunta</surname> <given-names>PKR</given-names></name> <name><surname>Gadekallu</surname> <given-names>TR</given-names></name> <name><surname>Lwendi</surname> <given-names>C</given-names></name> <name><surname>Wei</surname> <given-names>C</given-names></name> <etal/></person-group>. <article-title>Identification of malnutrition and prediction of BMI from facial images using real-time image processing and machine learning</article-title>. <source>IET Image Process.</source> (<year>2021</year>) <volume>16</volume>:<fpage>647</fpage>&#x02013;<lpage>58</lpage>. <pub-id pub-id-type="doi">10.1049/ipr2.12222</pub-id></citation>
</ref>
<ref id="B8">
<label>8.</label>
<citation citation-type="web"><person-group person-group-type="author"><collab>American Cancer Society</collab></person-group>. Key Statistics for Lung Cancer. Available online at: <ext-link ext-link-type="uri" xlink:href="https://www.cancer.org/cancer/lung-cancer/about/key-statistics.html">https://www.cancer.org/cancer/lung-cancer/about/key-statistics.html</ext-link> (accessed January 5, 2022).</citation>
</ref>
<ref id="B9">
<label>9.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Prokop</surname> <given-names>M</given-names></name> <name><surname>Galanshi</surname> <given-names>M</given-names></name></person-group>. <source>Spiral and Multislice Computed Tomography of the Body</source>. <publisher-loc>Stuttgart</publisher-loc>: <publisher-name>Thime Medical Publishers</publisher-name> (<year>2003</year>)</citation>
</ref>
<ref id="B10">
<label>10.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Abbas</surname> <given-names>S</given-names></name> <name><surname>Jalil</surname> <given-names>Z</given-names></name> <name><surname>Javed</surname> <given-names>AR</given-names></name> <name><surname>Batool</surname> <given-names>I</given-names></name> <name><surname>Khan</surname> <given-names>MZ</given-names></name> <name><surname>Noorwali</surname> <given-names>A</given-names></name> <etal/></person-group>. <article-title>BCD-WERT: a novel approach for breast cancer detection using whale optimization based efficient features and extremely randomized tree algorithm</article-title>. <source>Peer J Comput Sci</source>. (<year>2021</year>) <volume>7</volume>:<fpage>e390</fpage>. <pub-id pub-id-type="doi">10.7717/peerj-cs.390</pub-id><pub-id pub-id-type="pmid">33817036</pub-id></citation></ref>
<ref id="B11">
<label>11.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Ananya</surname> <given-names>C</given-names></name> <name><surname>Rajamenakshi Subramanian</surname> <given-names>R</given-names></name> <name><surname>Gaur</surname> <given-names>S</given-names></name></person-group>. <article-title>A novel approach for tumor segmentation for lung cancer using multi-objective genetic algorithm and connected component analysis</article-title>. In: <source>Proceedings of the 2nd International Conference on Data Engineering and Communication Technology, Advances in Intelligent Systems and Computing Springer Nature.</source> <publisher-loc>Singapore</publisher-loc> (<year>2019</year>). pp. <fpage>367</fpage>&#x02013;<lpage>76</lpage>.</citation>
</ref>
<ref id="B12">
<label>12.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Venkatesh</surname> <given-names>C</given-names></name> <name><surname>Polaiah</surname> <given-names>B</given-names></name></person-group>. <article-title>A novel approach for lung lesion segmentation using optimization technique</article-title>. <source>Helix Scientific Explor.</source> (<year>2019</year>) <volume>94</volume>:<fpage>4832</fpage>&#x02013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.29042/2019-4832-4837</pub-id></citation>
</ref>
<ref id="B13">
<label>13.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Preeti</surname> <given-names>J</given-names></name> <name><surname>Bajaj</surname> <given-names>SB</given-names></name> <name><surname>Aman</surname> <given-names>J</given-names></name></person-group>. <article-title>Segmentation and Detection of Lung Cancer Using Image Processing and Clustering Techniques</article-title>. In: <source>Progress in Advanced Computing and Intelligent Engineering, Adv Intell Syst Comput</source>. <publisher-loc>Singapore</publisher-loc>: <publisher-name>Springer Nature</publisher-name> (<year>2019</year>) <volume>1</volume>:<fpage>3</fpage>&#x02013;<lpage>23</lpage>. <pub-id pub-id-type="doi">10.1007/978-981-13-1708-8_2</pub-id></citation>
</ref>
<ref id="B14">
<label>14.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Senthil Kumar</surname> <given-names>K</given-names></name> <name><surname>Venkatalakshmi</surname> <given-names>K</given-names></name> <name><surname>Karthikeyan</surname> <given-names>K</given-names></name></person-group>. <article-title>Lung cancer detection using image segmentation by means of various evolutionary algorithms</article-title>. <source>Hindawi Comput Math Methods Med</source>. (<year>2019</year>) <volume>2019</volume>:<fpage>1</fpage>&#x02013;<lpage>6</lpage>. <pub-id pub-id-type="doi">10.1155/2019/4909846</pub-id><pub-id pub-id-type="pmid">30728852</pub-id></citation></ref>
<ref id="B15">
<label>15.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Perumal</surname> <given-names>S</given-names></name> <name><surname>Velmurugan</surname> <given-names>T</given-names></name></person-group>. <article-title>Lung cancer detection and classification on CT scan images using enhanced artificial bee colony optimization</article-title>. <source>Int J Eng Technol.</source> (<year>2018</year>) <volume>7</volume>:<fpage>74</fpage>&#x02013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.14419/ijet.v7i2.26.12538</pub-id></citation>
</ref>
<ref id="B16">
<label>16.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ammar</surname> <given-names>O</given-names></name> <name><surname>Ibrahim Al</surname> <given-names>A</given-names></name> <name><surname>Abrahim</surname> <given-names>B</given-names></name></person-group>. <article-title>Novel genetic algorithm for early prediction and detection of lung cancer</article-title>. <source>J Cancer Treat Res</source>. (<year>2017</year>) <volume>5</volume>:<fpage>15</fpage>&#x02013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.11648/j.jctr.20170502.13</pub-id><pub-id pub-id-type="pmid">23534801</pub-id></citation></ref>
<ref id="B17">
<label>17.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kamil</surname> <given-names>D</given-names></name> <name><surname>Ali</surname> <given-names>H</given-names></name> <name><surname>Yoney</surname> <given-names>KE</given-names></name></person-group>. <article-title>Lung lesion segmentation using gaussian filter and discrete wavelet transform</article-title>. <source>ITM Web Conf</source>. (<year>2017</year>) <volume>11</volume>:<fpage>1</fpage>&#x02013;<lpage>10</lpage>. <pub-id pub-id-type="doi">10.1051/itmconf/20171101018</pub-id></citation>
</ref>
<ref id="B18">
<label>18.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Mukesh CA</surname> <given-names>Dr</given-names></name> <name><surname>Bhumika</surname> <given-names>G</given-names></name></person-group>. <article-title>Detect mass tissue in lung images using discrete wavelet transformation</article-title>. In: <source>Proceedings of the IEEE International Conference on Information Processing (IICIP)</source> (<publisher-loc>New Delhi</publisher-loc>), (<year>2016</year>):<fpage>1</fpage>&#x02013;<lpage>10</lpage>.</citation>
</ref>
<ref id="B19">
<label>19.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Santos</surname> <given-names>AM</given-names></name> <name><surname>Ode</surname> <given-names>A</given-names></name> <name><surname>Filho</surname> <given-names>C</given-names></name> <name><surname>Silva</surname> <given-names>AC</given-names></name> <name><surname>Nunes</surname> <given-names>RA</given-names></name></person-group>. <article-title>Automatic detection of small lung nodules in 3D CT data using gaussian mixture models, Tsallis entropy and SVM</article-title>. <source>Eng Appl Artif Intell</source>. (<year>2014</year>) <volume>36</volume>:<fpage>27</fpage>&#x02013;<lpage>39</lpage>. <pub-id pub-id-type="doi">10.1016/j.engappai.2014.07.007</pub-id></citation>
</ref>
<ref id="B20">
<label>20.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jinsa</surname> <given-names>K</given-names></name> <name><surname>Gunavathi</surname> <given-names>K</given-names></name></person-group>. <article-title>Lung cancer classification using neural networks for CT images</article-title>. <source>Comput Methods Programs Biomed</source>. (<year>2014</year>) <volume>113</volume>:<fpage>202</fpage>&#x02013;<lpage>209</lpage>. <pub-id pub-id-type="doi">10.1016/j.cmpb.2013.10.011</pub-id><pub-id pub-id-type="pmid">32190744</pub-id></citation></ref>
<ref id="B21">
<label>21.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Anita</surname> <given-names>P</given-names></name> <name><surname>Raja</surname> <given-names>SK</given-names></name> <name><surname>Gandharba</surname> <given-names>S</given-names></name></person-group>. <article-title>Digital image steganography using LSB substitution, PVD, and EMD</article-title>. <source>Hindawi Math Probl Eng</source>. (<year>2018</year>) <volume>2018</volume>:<fpage>1</fpage>&#x02013;<lpage>11</lpage>. <pub-id pub-id-type="doi">10.1155/2018/1804953</pub-id></citation>
</ref>
<ref id="B22">
<label>22.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Venkatesh</surname> <given-names>C</given-names></name> <name><surname>Polaiah</surname> <given-names>B</given-names></name></person-group>. <article-title>An investigation of diverse optimization techniques on medical imagery for detection of perilous diseases</article-title>. <source>Front J Soc Technol Environ Sci</source>. (<year>2017</year>) <volume>62</volume>:<fpage>49</fpage>&#x02013;<lpage>255</lpage>. <pub-id pub-id-type="doi">10.1108/IJPCC-10-2020-0160</pub-id></citation>
</ref>
<ref id="B23">
<label>23.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Thippa Reddy</surname> <given-names>G</given-names></name> <name><surname>Khare</surname> <given-names>N</given-names></name> <name><surname>Bhattacharya</surname> <given-names>S</given-names></name> <name><surname>Singh</surname> <given-names>S</given-names></name> <name><surname>Maddikunta</surname> <given-names>PKR</given-names></name> <name><surname>In-Ho</surname> <given-names>Ra</given-names></name> <etal/></person-group>. <article-title>Early detection of diabetic retinopathy using PCA-firefly based deep learning model</article-title>. <source>Electronics</source>. (<year>2020</year>) <volume>9</volume>:<fpage>274</fpage>. <pub-id pub-id-type="doi">10.3390/electronics9020274</pub-id><pub-id pub-id-type="pmid">34749634</pub-id></citation></ref>
<ref id="B24">
<label>24.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Madhav</surname> <given-names>BTP</given-names></name> <name><surname>Pardhasaradhi</surname> <given-names>P</given-names></name> <name><surname>Manepalli</surname> <given-names>RKNR</given-names></name> <name><surname>Kishore</surname> <given-names>PVV</given-names></name> <name><surname>Pisipat</surname> <given-names>VGKM</given-names></name></person-group>. <article-title>Image enhancement using virtual contrast image fusion on Fe3O4 and ZnO nanodispersed decyloxybenzoic acid</article-title>. <source>Liq Cryst</source>. (<year>2018</year>) <volume>42</volume>:<fpage>1329</fpage>&#x02013;<lpage>36</lpage>. <pub-id pub-id-type="doi">10.1080/02678292.2015.1050704</pub-id></citation>
</ref>
<ref id="B25">
<label>25.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Prasanna</surname> <given-names>MN</given-names></name> <name><surname>Polaiah</surname> <given-names>B</given-names></name></person-group>. <article-title>Optimization of rotary kiln in cement industry using conventional control systems</article-title>. <source>Helix</source>. (<year>2019</year>) <volume>9</volume>:<fpage>4843</fpage>&#x02013;<lpage>49</lpage>. <pub-id pub-id-type="doi">10.29042/2019-4843-4849</pub-id></citation>
</ref>
<ref id="B26">
<label>26.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Venkatesh</surname> <given-names>C</given-names></name> <name><surname>Bhagyalakshmi</surname> <given-names>K</given-names></name> <name><surname>Sivayamini</surname> <given-names>L</given-names></name></person-group>. <article-title>Detection of diverse tumefactions in medial images by various cumulation methods</article-title>. <source>Int Res J Eng Technol (IRJET).</source> (<year>2017</year>) <volume>4</volume>:<fpage>1195</fpage>&#x02013;<lpage>200</lpage>.</citation>
</ref>
<ref id="B27">
<label>27.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Otsu</surname> <given-names>N</given-names></name></person-group>. <article-title>A threshold selection method from gray-level histograms</article-title>. <source>IEEE Trans Sys Man Cyber</source>. (<year>1979</year>) <volume>9</volume>:<fpage>62</fpage>&#x02013;<lpage>6</lpage>.</citation>
</ref>
<ref id="B28">
<label>28.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Xin-She</surname> <given-names>Y</given-names></name></person-group>. <source>Cuckoo Search and Firefly Algorithm Theory and Applications</source>. <publisher-name>Springer</publisher-name> (<year>2014</year>).</citation>
</ref>
<ref id="B29">
<label>29.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Yang</surname> <given-names>XS</given-names></name> <name><surname>Deb</surname> <given-names>S</given-names></name></person-group>. <article-title>Cuckoo search via levy flights</article-title>. In: <source>Proceedings of World Congress on Nature and Biologically Inspired Computing (NaBIC 2009)</source> (<publisher-loc>Coimbatore</publisher-loc>), (<year>2009</year>):<fpage>210</fpage>&#x02013;<lpage>14</lpage>.</citation>
</ref>
<ref id="B30">
<label>30.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ojala</surname> <given-names>T</given-names></name> <name><surname>Pietikainen</surname> <given-names>M</given-names></name> <name><surname>Harwood</surname> <given-names>D</given-names></name></person-group>. <article-title>A comparative study of texture measures with classification based on feature distributions</article-title>. <source>Pattern Recognit</source>. (<year>1996</year>) <volume>29</volume>:<fpage>51</fpage>&#x02013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1016/0031-3203(95)00067-4</pub-id><pub-id pub-id-type="pmid">20800179</pub-id></citation></ref>
<ref id="B31">
<label>31.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Iwendi</surname> <given-names>C</given-names></name> <name><surname>Bashir</surname> <given-names>AK</given-names></name> <name><surname>Peshkar</surname> <given-names>A</given-names></name> <name><surname>Sujatha</surname> <given-names>R</given-names></name> <name><surname>Chatterjee</surname> <given-names>JM</given-names></name> <name><surname>Pasupuleti</surname> <given-names>S</given-names></name> <etal/></person-group>. <article-title>COVID-19 patient health prediction using boosted random forest algorithm</article-title>. <source>Front Public Health.</source> (<year>2020</year>) <volume>8</volume>:<fpage>357</fpage>. <pub-id pub-id-type="doi">10.3389/fpubh.2020.00357</pub-id><pub-id pub-id-type="pmid">32719767</pub-id></citation></ref>
<ref id="B32">
<label>32.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Aparna</surname> <given-names>P</given-names></name> <name><surname>Polurie</surname> <given-names>V</given-names></name> <name><surname>Vijay</surname> <given-names>K</given-names></name></person-group>. <article-title>An efficient medical image watermarking technique in e-healthcare application using hybridization of compression and cryptography algorithm</article-title>. <source>J Intell Syst</source>. (<year>2017</year>) <volume>27</volume>:<fpage>115</fpage>&#x02013;<lpage>33</lpage>. <pub-id pub-id-type="doi">10.1515/jisys-2017-0266</pub-id></citation>
</ref>
<ref id="B33">
<label>33.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Malakonda Reddy</surname> <given-names>B</given-names></name> <name><surname>Rahman ZUr</surname> <given-names>Md</given-names></name></person-group>. <article-title>SAR electromagnetic image conditioning using a new adaptive particle swarm optimization</article-title>. <source>ACES J</source>. (<year>2018</year>) <volume>33</volume>:<fpage>1439</fpage>&#x02013;<lpage>46</lpage>.</citation>
</ref>
<ref id="B34">
<label>34.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kumar</surname> <given-names>EK</given-names></name> <name><surname>Kishore</surname> <given-names>PVV</given-names></name> <name><surname>Sastry</surname> <given-names>ASCS</given-names></name> <name><surname>Teja Kiran Kumar</surname> <given-names>M</given-names></name> <name><surname>Anil Kumar</surname> <given-names>D</given-names></name></person-group>. <article-title>Training CNNs for 3-D sign language recognition with color texture coded joint angular displacement maps</article-title>. <source>IEEE Signal Process Lett</source>. (<year>2018</year>) <volume>25</volume>:<fpage>645</fpage>&#x02013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1109/LSP.2018.2817179</pub-id></citation>
</ref>
<ref id="B35">
<label>35.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bhubaneswar</surname> <given-names>P</given-names></name> <name><surname>Brintha Therese</surname> <given-names>A</given-names></name></person-group>. <article-title>Detection of cancer in lung with K-NN classification using genetic algorithm</article-title>. <source>Proc Mater Sci</source>. (<year>2015</year>) <volume>10</volume>:<fpage>433</fpage>&#x02013;<lpage>440</lpage>. <pub-id pub-id-type="doi">10.1016/j.mspro.2015.06.077</pub-id><pub-id pub-id-type="pmid">30069674</pub-id></citation></ref>
<ref id="B36">
<label>36.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Othayoth</surname> <given-names>R</given-names></name> <name><surname>Mathi</surname> <given-names>P</given-names></name> <name><surname>Bheemanapally</surname> <given-names>K</given-names></name> <name><surname>Kakarla</surname> <given-names>L</given-names></name> <name><surname>Botlagunta</surname> <given-names>M</given-names></name></person-group>. <article-title>Characterization of vitamin-cisplatin-loaded chitosan nano-particles for chemoprevention and cancer fatigue</article-title>. <source>J Microencapsul</source>. (<year>2015</year>) <volume>32</volume>:<fpage>578</fpage>&#x02013;<lpage>88</lpage>. <pub-id pub-id-type="doi">10.3109/02652048.2015.1065921</pub-id><pub-id pub-id-type="pmid">26218628</pub-id></citation></ref>
<ref id="B37">
<label>37.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Svitlana</surname> <given-names>K</given-names></name> <name><surname>Chauhdar</surname> <given-names>SH</given-names></name> <name><surname>Iwendi</surname> <given-names>C</given-names></name> <name><surname>Liu</surname> <given-names>L</given-names></name> <name><surname>Yong</surname> <given-names>W</given-names></name> <name><surname>Bashir</surname> <given-names>AK</given-names></name></person-group>. <article-title>Socio-Technological factors affecting user&#x00027;s adoption of eHealth functionalities: a case study of China and Ukraine eHealth systems</article-title>. <source>IEEE Access</source>. (<year>2019</year>) <volume>7</volume>:<fpage>90777</fpage>&#x02013;<lpage>88</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2019.2924584</pub-id></citation>
</ref>
<ref id="B38">
<label>38.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ratna Bhargavi</surname> <given-names>V</given-names></name> <name><surname>Rajesh</surname> <given-names>V</given-names></name></person-group>. <article-title>Computer aided bright lesion classification in fundus image based on feature extraction</article-title>. <source>Intern J Pattern Recognit Artif Intell</source>. (<year>2018</year>) <volume>32</volume>:<fpage>1850034</fpage>. <pub-id pub-id-type="doi">10.1142/S0218001418500349</pub-id></citation>
</ref>
<ref id="B39">
<label>39.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Venkatesh</surname> <given-names>C</given-names></name> <name><surname>Bojja</surname> <given-names>P</given-names></name></person-group>. <article-title>Development of qualitative model for detection of lung cancer using optimization</article-title>. <source>Int J Innovative Technol Explor Eng</source>. (<year>2019</year>) <volume>8</volume>:<fpage>3143</fpage>&#x02013;<lpage>47</lpage>. <pub-id pub-id-type="doi">10.35940/ijitee.I8619.078919</pub-id></citation>
</ref>
</ref-list> 
</back>
</article> 
