<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "http://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="1.1">
<front>
<journal-meta>
<journal-id journal-id-type="pmc">CMC</journal-id>
<journal-id journal-id-type="nlm-ta">CMC</journal-id>
<journal-id journal-id-type="publisher-id">CMC</journal-id>
<journal-title-group>
<journal-title>Computers, Materials &#x0026; Continua</journal-title>
</journal-title-group>
<issn pub-type="epub">1546-2226</issn>
<issn pub-type="ppub">1546-2218</issn>
<publisher>
<publisher-name>Tech Science Press</publisher-name>
<publisher-loc>USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">23418</article-id>
<article-id pub-id-type="doi">10.32604/cmc.2022.023418</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Article</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>COVID-19 Severity Prediction Using Enhanced Whale with Salp Swarm Feature Classification</article-title>
<alt-title alt-title-type="left-running-head">COVID-19 Severity Prediction Using Enhanced Whale with Salp Swarm Feature Classification</alt-title>
<alt-title alt-title-type="right-running-head">COVID-19 Severity Prediction Using Enhanced Whale with Salp Swarm Feature Classification</alt-title>
</title-group>
<contrib-group content-type="authors">
<contrib id="author-1" contrib-type="author">
<name name-style="western"><surname>Budimirovic</surname><given-names>Nebojsa</given-names></name><xref ref-type="aff" rid="aff-1">1</xref></contrib>
<contrib id="author-2" contrib-type="author">
<name name-style="western"><surname>Prabhu</surname><given-names>E.</given-names></name><xref ref-type="aff" rid="aff-2">2</xref></contrib>
<contrib id="author-3" contrib-type="author">
<name name-style="western"><surname>Antonijevic</surname><given-names>Milos</given-names></name><xref ref-type="aff" rid="aff-1">1</xref></contrib>
<contrib id="author-4" contrib-type="author">
<name name-style="western"><surname>Zivkovic</surname><given-names>Miodrag</given-names></name><xref ref-type="aff" rid="aff-1">1</xref></contrib>
<contrib id="author-5" contrib-type="author" corresp="yes">
<name name-style="western"><surname>Bacanin</surname><given-names>Nebojsa</given-names></name><xref ref-type="aff" rid="aff-1">1</xref><email>nbacanin@singidunum.ac.rs</email>
</contrib>
<contrib id="author-6" contrib-type="author">
<name name-style="western"><surname>Strumberger</surname><given-names>Ivana</given-names></name><xref ref-type="aff" rid="aff-1">1</xref></contrib>
<contrib id="author-7" contrib-type="author">
<name name-style="western"><surname>Venkatachalam</surname><given-names>K.</given-names></name><xref ref-type="aff" rid="aff-3">3</xref></contrib>
<aff id="aff-1"><label>1</label><institution>Singidunum University</institution>, <addr-line>Belgrade, 11000</addr-line>, <country>Serbia</country></aff>
<aff id="aff-2"><label>2</label><institution>Department of Electronics and Communication Engineering, Amrita School of Engineering, Amrita Vishwa Vidyapeetham</institution>, <addr-line>Coimbatore, 641112</addr-line>, <country>India</country></aff>
<aff id="aff-3"><label>3</label><institution>Department of Applied Cybernetics, Faculty of Science, University of Hradec Kr&#x00E1;lov&#x00E9;</institution>, <addr-line>Hradec Kr&#x00E1;lov&#x00E9;, 50003</addr-line>, <country>Czech Republic</country></aff>
</contrib-group>
<author-notes>
<corresp id="cor1"><label>&#x002A;</label>Corresponding Author: Nebojsa Bacanin. Email: <email>nbacanin@singidunum.ac.rs</email></corresp>
</author-notes>
<pub-date pub-type="epub" date-type="pub" iso-8601-date="2022-02-21"><day>21</day>
<month>02</month>
<year>2022</year></pub-date>
<volume>72</volume>
<issue>1</issue>
<fpage>1685</fpage>
<lpage>1698</lpage>
<history>
<date date-type="received"><day>07</day><month>9</month><year>2021</year></date>
<date date-type="accepted"><day>06</day><month>1</month><year>2022</year></date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2022 Budimirovic et al.</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Budimirovic et al.</copyright-holder>
<license xlink:href="https://creativecommons.org/licenses/by/4.0/">
<license-p>This work is licensed under a <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="TSP_CMC_23418.pdf"></self-uri>
<abstract>
<p>Computerized tomography (CT) scans and X-rays play an important role in the diagnosis of COVID-19 and pneumonia. On the basis of the image analysis results of chest CT and X-rays, the severity of lung infection is monitored using a tool. Many researchers have worked on the diagnosis of lung infection, but existing approaches that aim to be accurate and efficient take a lot of time and are inefficient. To overcome these issues, our proposed study implements four cascaded stages. First, for pre-processing, a median filter is used. Second, texture feature extraction uses principal component analysis (PCA). Third, a modified whale optimization algorithm (MWOA) is used for feature selection. The severity of lung infection is detected on the basis of age group. Fourth, image classification is done by using the proposed MWOA with the salp swarm algorithm (MWOA-SSA). MWOA-SSA has an accuracy of 97&#x0025;, whereas PCA and MWOA have accuracies of 81&#x0025; and 86&#x0025;. The sensitivity rate of the MWOA-SSA algorithm is better than those of PCA (84.4&#x0025;) and MWOA (95.2&#x0025;). MWOA-SSA outperforms other algorithms with a specificity of 97.8&#x0025;. This proposed method improves the effective classification of lung-affected images from large datasets.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>PCA</kwd>
<kwd>WOA</kwd>
<kwd>CT-image</kwd>
<kwd>lung infection</kwd>
<kwd>COVID-19</kwd>
</kwd-group>
</article-meta>
</front>
<body>
<sec id="s1"><label>1</label><title>Introduction</title>
<p>COVID-19 is a virus infection that has changed human life in various aspects including finance, education, health care, and supply chains. People with COVID-19 infection are facing respiratory problems and can recover with appropriate treatment effectively [<xref ref-type="bibr" rid="ref-1">1</xref>]. Many studies have been done in implementing classification and determining the presence of COVID-19 as well as in detecting the severity of pneumonia. CT and X-ray image modalities are non-invasive and used to detect the severity of lung infection [<xref ref-type="bibr" rid="ref-2">2</xref>,<xref ref-type="bibr" rid="ref-3">3</xref>]. In this study, we used principal component analysis (PCA) for feature extraction of CT images and a modified whale optimization algorithm (MWOA) for feature selection. To classify COVID-affected images from a large dataset and detect severity, the MWOA is used with the salp swarm algorithm (MWOA-SSA). The main disadvantages of existing algorithms are inefficiency, high execution time, and maximized error rate. To overcome these issues, our proposed MWOA-SSA has high potential in detecting the severity of lung infections such as pneumonia and classifying COVID-19 in affected and unaffected images from a large dataset effectively and quickly.</p>
<p>To predict coronavirus, X-ray images play a more important role than CT because the former is less sensitive. Furthermore, X-ray images are used to diagnose the early and mild stages of coronavirus patients. CT images are also used in the diagnosis of coronavirus and improving efficiency in terms of dosage in radiation [<xref ref-type="bibr" rid="ref-4">4</xref>]. Scanning images in a sliced manner is enhanced effectively by using multi-slice computerized tomography (MSCT) [<xref ref-type="bibr" rid="ref-5">5</xref>]. An improvement in larger temporal resolution is achieved by the dual source CT image [<xref ref-type="bibr" rid="ref-6">6</xref>].</p>
<p>Machine learning algorithms have been used for the last decades in medical applications for computer-based diagnosis, helping physicians diagnose at earlier stages of diseases and providing better customized therapies to patients [<xref ref-type="bibr" rid="ref-7">7</xref>,<xref ref-type="bibr" rid="ref-8">8</xref>]. Approaches to find the best solution from all possible solutions of a particular radiology problem are known as meta-heuristic algorithms. The acceptable best solution of the optimization technique requires less computational effort within a stipulated time [<xref ref-type="bibr" rid="ref-9">9</xref>]. For the feature selection, the proposed MWOA is implemented with a binary optimizer in terms of average select size, error rate, mean, standard deviation, average fitness, best fitness, and worst fitness. The main contributions of this study are as follows,
<list list-type="order">
<list-item><p>A COVID-19 classification based on proposed algorithms for feature classification of MWOA-SSA is developed.</p></list-item>
<list-item><p>A novel approach in detecting severity of lung infection based on severity level is implemented.</p></list-item>
<list-item><p>The proposed MWOA-SSA can effectively classify the input CT images as COVID-19 or non-COVID-19.</p></list-item>
</list></p>
<p>The paper has been organized as follows. Section 2 presents the literature review. Section 3 introduces the classification of COVID-19 images using MWOA-SSA. Section 4 discusses the experimented results. Section 5 concludes the paper and provides future directions.</p>
</sec>
<sec id="s2"><label>2</label><title>Review of Literature</title>
<p>This section describes the recent literature on feature classification and prediction of coronavirus. COVID-19 has affected human beings in every aspect of their daily lives. The coronavirus disease is diagnosed by using various image modalities such as CT and X-ray images. Through these images, physicians scan and diagnose at early stages and during disease progression. Many studies have been published on the prediction of coronavirus. Our aim is to achieve effectiveness in classifying COVID-19 case images from a large dataset and detect the severity of lung infections such as pneumonia. A previous paper [<xref ref-type="bibr" rid="ref-10">10</xref>] proposed evaluating the infection rate in CT scans of lungs using visual and coronal axes. Visual inspection is used to identify the lung infection in COVID-19 disease [<xref ref-type="bibr" rid="ref-11">11</xref>].</p>
<p>Another paper [<xref ref-type="bibr" rid="ref-12">12</xref>] proposed implementing a visual infection-based method to detect lung infection using lung CT scan. Authors in [<xref ref-type="bibr" rid="ref-13">13</xref>] implemented deep learning algorithms to identify and screen COVID-19 patients using the modality of CT images accurately. By using an artificial intelligence (AI) technique for diagnosis, COVID-19 patients are identified based on convolutional neural network (CNN) using CT slices images, helping accurately classify COVID-19 from non-COVID-19 groups [<xref ref-type="bibr" rid="ref-14">14</xref>]. The machine learning algorithm fractional multichannel exponent moments method is used to extract features from the chest X-ray image and used to classify COVID-19 or non-COVID-19 patients [<xref ref-type="bibr" rid="ref-15">15</xref>]. <xref ref-type="table" rid="table-1">Tab. 1</xref> shows a summary of recent research work in COVID-19.</p>
<table-wrap id="table-1"><label>Table 1</label><caption><title>Survey on existing algorithms</title></caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">Author name</th>
<th align="left">Modality of image</th>
<th align="left">Methods</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">Hu et al. (2020) [<xref ref-type="bibr" rid="ref-16">16</xref>]</td>
<td align="left">CT image</td>
<td align="left">Supervised deep learning</td>
</tr>
<tr>
<td align="left">Nour et al. (2020) [<xref ref-type="bibr" rid="ref-17">17</xref>]</td>
<td align="left">X-Ray image</td>
<td align="left">CNN, support vector machine (SVM)</td>
</tr>
<tr>
<td align="left">Wu et al. (2020) [<xref ref-type="bibr" rid="ref-18">18</xref>]</td>
<td align="left">CT image</td>
<td align="left">ResNet50 based deep learning</td>
</tr>
<tr>
<td align="left">Ardakani et al. (2020) [<xref ref-type="bibr" rid="ref-14">14</xref>]</td>
<td align="left">CT image</td>
<td align="left">ResNet-101 based deep learning</td>
</tr>
<tr>
<td align="left">Zhang et al. (2020) [<xref ref-type="bibr" rid="ref-19">19</xref>]</td>
<td align="left">CT image</td>
<td align="left">AI based ResNet</td>
</tr>
<tr>
<td align="left">Panwar et al. (2020) [<xref ref-type="bibr" rid="ref-20">20</xref>]</td>
<td align="left">X-Ray image</td>
<td align="left">Transfer learning, deep CNN</td>
</tr>
<tr>
<td align="left">Butt et al. (2020) [<xref ref-type="bibr" rid="ref-21">21</xref>]</td>
<td align="left">CT image</td>
<td align="left">CNN</td>
</tr>
<tr>
<td align="left">Al-Tashi et al. (2019) [<xref ref-type="bibr" rid="ref-22">22</xref>]</td>
<td align="left">CT image</td>
<td align="left">Hybrid grey wolf optimization</td>
</tr>
<tr>
<td align="left">Ye et al. (2019) [<xref ref-type="bibr" rid="ref-23">23</xref>]</td>
<td align="left">CT image</td>
<td align="left">Adaptive statistical iterative reconstruction-V technique</td>
</tr>
<tr>
<td align="left">Yamashita et al. (2018) [<xref ref-type="bibr" rid="ref-24">24</xref>]</td>
<td align="left">X-ray image</td>
<td align="left">CNN</td>
</tr>
<tr>
<td align="left">Fu et al. (2018) [<xref ref-type="bibr" rid="ref-25">25</xref>]</td>
<td align="left">CT, X-Ray image</td>
<td align="left">Multimodal CNN</td>
</tr>
<tr>
<td align="left">Walker et al. (2017) [<xref ref-type="bibr" rid="ref-26">26</xref>]</td>
<td align="left">CT, X-Ray image</td>
<td align="left">Multimodal CNN</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3"><label>3</label><title>Enhanced Whale with Salp Swarm Optimization Methodology</title>
<p>This work introduces the concept of classification of affected lung disease and its severity. This proposed work has four stages. First, a median filter is used for pre-processing. Second, PCA is used for texture feature extraction. Third, an MWOA is used for selecting features. Fourth, the proposed MWOA-SSA is used for classification and identifying the severity. The architecture of our proposed method is given in <xref ref-type="fig" rid="fig-1">Fig. 1</xref>. CT scan images are collected and preprocessed using a median filter. PCA is used to remove unwanted textures in the images. Then, the images are processed using MWOA-SSA to classify the affected image.</p>
<fig id="fig-1"><label>Figure 1</label><caption><title>Architecture of proposed work</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_23418-fig-1.png"/></fig>
<p>This proposed work consists of four phases:
<list list-type="simple">
<list-item><p><bold>Phase 1:</bold> Pre-processing using a median filter.</p></list-item>
<list-item><p><bold>Phase 2:</bold> Feature extraction using PCA.</p></list-item>
<list-item><p><bold>Phase 3:</bold> Feature selection using MWOA.</p></list-item>
<list-item><p><bold>Phase 4:</bold> Proposed work on classification of infected lung images from a large dataset using MWOA-SSA.</p></list-item>
</list></p>
<sec id="s3_1"><label>3.1</label><title>Pre-Processing</title>
<p>The aim of pre-processing is to improve the quality of the CT scan chest image. We need to denoise the image by applying a median filter. This median filter scans the entire image using an <inline-formula id="ieqn-1"><mml:math id="mml-ieqn-1"><mml:mn>8</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mn>8</mml:mn></mml:math></inline-formula> matrix and replaces the center pixel value by choosing the median of all pixel values inside the <inline-formula id="ieqn-2"><mml:math id="mml-ieqn-2"><mml:mn>8</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mn>8</mml:mn></mml:math></inline-formula> matrix by using
<disp-formula id="eqn-1"><label>(1)</label><mml:math id="mml-eqn-1" display="block"><mml:mi>i</mml:mi><mml:mi>m</mml:mi><mml:mi>g</mml:mi><mml:mo stretchy="false">[</mml:mo><mml:mrow><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi></mml:mrow><mml:mo stretchy="false">]</mml:mo><mml:mo>=</mml:mo><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mi>d</mml:mi><mml:mi>i</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mo fence="false" stretchy="false">{</mml:mo><mml:mi>i</mml:mi><mml:mi>m</mml:mi><mml:mi>g</mml:mi><mml:mi>o</mml:mi><mml:mo stretchy="false">[</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mo stretchy="false">]</mml:mo><mml:mo>,</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x2208;</mml:mo><mml:mi>y</mml:mi><mml:mo fence="false" stretchy="false">}</mml:mo></mml:math></disp-formula>where <italic>y</italic> is the neighborhood pixel value represented by the user and <inline-formula id="ieqn-3"><mml:math id="mml-ieqn-3"><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:math></inline-formula> is the center pixel value&#x0027;s location.</p>
</sec>
<sec id="s3_2"><label>3.2</label><title>Texture Feature Extraction Using PCA</title>
<p>The idea behind PCA is to map m-dimensional features to n dimensions that have a set of orthogonal feature values. Feature extraction using PCA meets the variance of sample pixel values after reduction of dimensionality and minimizes the error rate. The steps needed for texture feature extraction using PCA are given below, and <xref ref-type="fig" rid="fig-2">Fig. 2</xref> provides an overview of PCA operation.
</p>
<fig id="fig-5">
<graphic mimetype="image" mime-subtype="png" xlink:href="CMC_23418-fig-5.png"/>
</fig>
<fig id="fig-2"><label>Figure 2</label><caption><title>Overview of PCA</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_23418-fig-2.png"/></fig>
</sec>
<sec id="s3_3"><label>3.3</label><title>Feature Selection Using MWOA</title>
<p>Feature selection of the lung image is done using the MWOA, which is based on the behavior of whales, in which bubbles are used for trapping the prey by searching in a spiral shape [<xref ref-type="bibr" rid="ref-27">27</xref>,<xref ref-type="bibr" rid="ref-28">28</xref>]. The whale is randomly selected, and it can be updated by the best whale value that gives the optimal solution.
<disp-formula id="eqn-7"><label>(7)</label><mml:math id="mml-eqn-7" display="block"><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mover><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2192;</mml:mo></mml:mover><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mover><mml:mi>A</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mspace width="thickmathspace" /><mml:mo>.</mml:mo><mml:mspace width="thickmathspace" /><mml:mrow><mml:mover><mml:mi>D</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mrow><mml:mover><mml:mrow><mml:mi>c</mml:mi><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo>&#x2192;</mml:mo></mml:mover><mml:mo>.</mml:mo><mml:mspace width="thickmathspace" /><mml:mover><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2192;</mml:mo></mml:mover><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow></mml:mrow><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>To improve this result, the positions of three whales are randomly chosen, and they are not affected by the leader&#x0027;s position. <xref ref-type="fig" rid="fig-5">Eq. (6)</xref> is modified as follows:
<disp-formula id="eqn-8"><label>(8)</label><mml:math id="mml-eqn-8" display="block"><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mover><mml:mrow><mml:msub><mml:mi>w</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo>&#x2192;</mml:mo></mml:mover><mml:mo>&#x2217;</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>+</mml:mo><mml:mrow><mml:mover><mml:mi>x</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mo>&#x2217;</mml:mo><mml:mover><mml:mrow><mml:msub><mml:mi>w</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>&#x2192;</mml:mo></mml:mover><mml:mo>&#x2217;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mrow><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow></mml:mrow><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mn>2</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow></mml:mrow><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mn>3</mml:mn></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>+</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mover><mml:mi>x</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow></mml:mrow><mml:mo 
stretchy="false">)</mml:mo><mml:mo>&#x2217;</mml:mo><mml:mover><mml:mrow><mml:msub><mml:mi>w</mml:mi><mml:mn>3</mml:mn></mml:msub></mml:mrow><mml:mo>&#x2192;</mml:mo></mml:mover><mml:mo>&#x2217;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow></mml:mrow><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /></mml:math></disp-formula>where, <inline-formula id="ieqn-12"><mml:math id="mml-ieqn-12"><mml:mrow><mml:msub><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula>, <inline-formula 
id="ieqn-13"><mml:math id="mml-ieqn-13"><mml:mrow><mml:msub><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mn>2</mml:mn></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-14"><mml:math id="mml-ieqn-14"><mml:mrow><mml:msub><mml:mrow><mml:mover><mml:mi>F</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mn>3</mml:mn></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula> are randomly chosen solutions (prey). <inline-formula id="ieqn-15"><mml:math id="mml-ieqn-15"><mml:mover><mml:mrow><mml:msub><mml:mi>w</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo>&#x2192;</mml:mo></mml:mover></mml:math></inline-formula> is a random value between [0,0.5]. <inline-formula id="ieqn-16"><mml:math id="mml-ieqn-16"><mml:mover><mml:mrow><mml:msub><mml:mi>w</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>&#x2192;</mml:mo></mml:mover></mml:math></inline-formula> and <inline-formula id="ieqn-17"><mml:math id="mml-ieqn-17"><mml:mover><mml:mrow><mml:msub><mml:mi>w</mml:mi><mml:mn>3</mml:mn></mml:msub></mml:mrow><mml:mo>&#x2192;</mml:mo></mml:mover></mml:math></inline-formula> are random values between [0,1]. <inline-formula id="ieqn-18"><mml:math id="mml-ieqn-18"><mml:mrow><mml:mover><mml:mi>x</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow></mml:math></inline-formula> decreases the value and smoothens exploration and exploitation by using
<disp-formula id="eqn-9"><label>(9)</label><mml:math id="mml-eqn-9" display="block"><mml:mrow><mml:mover><mml:mi>x</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mfrac><mml:mrow><mml:mi>i</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>M</mml:mi><mml:mi>a</mml:mi><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow></mml:mfrac></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:math></disp-formula>where t represents iteration number, and <inline-formula id="ieqn-19"><mml:math id="mml-ieqn-19"><mml:mi>M</mml:mi><mml:mi>a</mml:mi><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula> represents the maximum number of iterations.</p>
<p>The algorithm is given as follows:
</p>
<fig id="fig-6">
<graphic mimetype="image" mime-subtype="png" xlink:href="CMC_23418-fig-6.png"/>
</fig>
</sec>
<sec id="s3_4"><label>3.4</label><title>Proposed Feature Classification Using MWOA-SSA</title>
<p>In this phase, classification of infected lung images from a large dataset is done using MWOA-SSA. To improve the accuracy and optimal solution, the SSA is used with MWOA. This SSA randomly initializes the swarm of N salps. The swarm is represented by the 2-D matrix <inline-formula id="ieqn-41"><mml:math id="mml-ieqn-41"><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>t</mml:mi></mml:math></inline-formula>. Searching food for the swarm is represented as <inline-formula id="ieqn-42"><mml:math id="mml-ieqn-42"><mml:mi>s</mml:mi><mml:mi>f</mml:mi></mml:math></inline-formula>, and leader&#x0027;s movement in the form of a chain is denoted as <inline-formula id="ieqn-43"><mml:math id="mml-ieqn-43"><mml:mi>s</mml:mi><mml:msubsup><mml:mi>x</mml:mi><mml:mi>i</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x2032;</mml:mi></mml:mrow></mml:msubsup></mml:math></inline-formula>. It is represented by using
<disp-formula id="eqn-10"><label>(10)</label><mml:math id="mml-eqn-10" display="block"><mml:mi>s</mml:mi><mml:msubsup><mml:mi>x</mml:mi><mml:mi>i</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x2032;</mml:mi></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable columnalign="left" rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>s</mml:mi><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>+</mml:mo><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>u</mml:mi><mml:mi>p</mml:mi><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:mi>w</mml:mi><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>+</mml:mo><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:mi>w</mml:mi><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>3</mml:mn></mml:msub></mml:mrow><mml:mspace width="thickmathspace" /><mml:mo>&#x2265;</mml:mo><mml:mn>0.5</mml:mn></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:mi>s</mml:mi><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>+</mml:mo><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo 
stretchy="false">(</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>u</mml:mi><mml:mi>p</mml:mi><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:mi>w</mml:mi><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>+</mml:mo><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:mi>w</mml:mi><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>3</mml:mn></mml:msub></mml:mrow><mml:mo>&#x003C;</mml:mo><mml:mn>0.5</mml:mn></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo fence="true" stretchy="true" symmetric="true"></mml:mo></mml:mrow></mml:math></disp-formula>where <italic>i</italic> is swarm&#x0027;s dimension position, and it is updated. <inline-formula id="ieqn-44"><mml:math id="mml-ieqn-44"><mml:mi>s</mml:mi><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> is the <inline-formula id="ieqn-45"><mml:math id="mml-ieqn-45"><mml:mrow><mml:msup><mml:mi>i</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mi>h</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:math></inline-formula> position for a source of food. 
<inline-formula id="ieqn-46"><mml:math id="mml-ieqn-46"><mml:mi>u</mml:mi><mml:mi>p</mml:mi><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-47"><mml:math id="mml-ieqn-47"><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:mi>w</mml:mi><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> are the upper and lower limits of the <inline-formula id="ieqn-48"><mml:math id="mml-ieqn-48"><mml:mrow><mml:msup><mml:mi>i</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mi>h</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:math></inline-formula> element. <inline-formula id="ieqn-49"><mml:math id="mml-ieqn-49"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow></mml:math></inline-formula> is a dynamic variable for iteration. <inline-formula id="ieqn-50"><mml:math id="mml-ieqn-50"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-51"><mml:math id="mml-ieqn-51"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>3</mml:mn></mml:msub></mml:mrow></mml:math></inline-formula> are random numbers in the interval [0,1]. <italic>r</italic><sub>1</sub> is calculated as
<disp-formula id="eqn-11"><label>(11)</label><mml:math id="mml-eqn-11" display="block"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mn>2</mml:mn><mml:mrow><mml:msup><mml:mi>e</mml:mi><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mrow><mml:mrow><mml:mn>4</mml:mn><mml:mi>i</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mspace width="negativethinmathspace" /><mml:mrow><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mrow><mml:mrow><mml:mpadded width="0"><mml:mphantom><mml:mrow><mml:mn>4</mml:mn><mml:mi>i</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:mphantom></mml:mpadded></mml:mrow></mml:mrow></mml:mrow><mml:mspace width="negativethinmathspace" /><mml:mrow><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:mrow></mml:msup></mml:mrow></mml:math></disp-formula>where <inline-formula id="ieqn-52"><mml:math id="mml-ieqn-52"><mml:mi>i</mml:mi><mml:mi>t</mml:mi></mml:math></inline-formula> represents the current iteration and <inline-formula id="ieqn-53"><mml:math id="mml-ieqn-53"><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:math></inline-formula> is the maximum number of iterations. <inline-formula id="ieqn-54"><mml:math id="mml-ieqn-54"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow></mml:math></inline-formula> is a control variable that controls the balance between exploitation and exploration of the optimization algorithm. It is represented as
<disp-formula id="eqn-12"><label>(12)</label><mml:math id="mml-eqn-12" display="block"><mml:mi>s</mml:mi><mml:msubsup><mml:mi>x</mml:mi><mml:mi>i</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x2032;</mml:mi></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mn>0.5</mml:mn><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>s</mml:mi><mml:msubsup><mml:mi>x</mml:mi><mml:mi>i</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x2032;</mml:mi></mml:mrow></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:mi>s</mml:mi><mml:msubsup><mml:mi>x</mml:mi><mml:mi>i</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>
The procedure for SSA is given as follows.
</p>
<fig id="fig-7">
<graphic mimetype="image" mime-subtype="png" xlink:href="CMC_23418-fig-7.png"/>
</fig>
</sec>
</sec>
<sec id="s4"><label>4</label><title>Results and Analysis</title>
<p>For the experimental result, data are collected from the Kaggle dataset [<xref ref-type="bibr" rid="ref-29">29</xref>], which has 1,500 CT images of COVID-19 and non-COVID 19. MWOA-SSA is compared with the existing algorithms MWOA [<xref ref-type="bibr" rid="ref-30">30</xref>] and SSA [<xref ref-type="bibr" rid="ref-31">31</xref>] by using performance metric measures of sensitivity, specificity, accuracy, precision (PPV), F-score, and negative predictive value (NPV).
<disp-formula id="ueqn-7">
<mml:math id="mml-ueqn-7" display="block"><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>T</mml:mi><mml:mi>r</mml:mi><mml:mi>u</mml:mi><mml:mi>e</mml:mi><mml:mspace width="thickmathspace" /><mml:mi>P</mml:mi><mml:mi>o</mml:mi><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>i</mml:mi><mml:mi>v</mml:mi><mml:mi>e</mml:mi><mml:mo>,</mml:mo><mml:mspace width="thinmathspace" /><mml:mi>T</mml:mi><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>T</mml:mi><mml:mi>r</mml:mi><mml:mi>u</mml:mi><mml:mi>e</mml:mi><mml:mspace width="thickmathspace" /><mml:mi>N</mml:mi><mml:mi>e</mml:mi><mml:mi>g</mml:mi><mml:mi>a</mml:mi><mml:mi>t</mml:mi><mml:mi>i</mml:mi><mml:mi>v</mml:mi><mml:mi>e</mml:mi><mml:mo>,</mml:mo><mml:mspace width="thinmathspace" /><mml:mi>F</mml:mi><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>F</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mspace width="thickmathspace" /><mml:mi>N</mml:mi><mml:mi>e</mml:mi><mml:mi>g</mml:mi><mml:mi>a</mml:mi><mml:mi>t</mml:mi><mml:mi>i</mml:mi><mml:mi>v</mml:mi><mml:mi>e</mml:mi><mml:mo>,</mml:mo><mml:mspace width="thinmathspace" /><mml:mi>F</mml:mi><mml:mi>P</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>F</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mspace width="thickmathspace" /><mml:mi>P</mml:mi><mml:mi>o</mml:mi><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>i</mml:mi><mml:mi>v</mml:mi><mml:mi>e</mml:mi><mml:mo>,</mml:mo></mml:math></disp-formula></p>
<p>These metric measures are defined by using:</p>
<p><bold>Sensitivity</bold></p>
<p>It is also called true positive rate or recall.
<disp-formula id="eqn-13"><label>(13)</label><mml:math id="mml-eqn-13" display="block"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>i</mml:mi><mml:mi>v</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>y</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>N</mml:mi></mml:mrow></mml:mfrac></mml:math></disp-formula></p>
<p><bold>Specificity</bold></p>
<p>It is called true negative rate (TNR).
<disp-formula id="eqn-14"><label>(14)</label><mml:math id="mml-eqn-14" display="block"><mml:mi>S</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mi>i</mml:mi><mml:mi>c</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>y</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>N</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>N</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi></mml:mrow></mml:mfrac></mml:math></disp-formula></p>
<p><bold>Accuracy</bold>
<disp-formula id="eqn-15"><label>(15)</label><mml:math id="mml-eqn-15" display="block"><mml:mi>A</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:mi>u</mml:mi><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>y</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>T</mml:mi><mml:mi>N</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>T</mml:mi><mml:mi>N</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>N</mml:mi></mml:mrow></mml:mfrac></mml:math></disp-formula></p>
<p><bold>Precision</bold></p>
<p>It is called positive predictive value (PPV).
<disp-formula id="eqn-16"><label>(16)</label><mml:math id="mml-eqn-16" display="block"><mml:mi>P</mml:mi><mml:mi>P</mml:mi><mml:mi>V</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi></mml:mrow></mml:mfrac></mml:math></disp-formula></p>
<p><bold>Negative Predictive Value</bold></p>
<p>It evaluates true negatives for all negative values by using
<disp-formula id="eqn-17"><label>(17)</label><mml:math id="mml-eqn-17" display="block"><mml:mi>N</mml:mi><mml:mi>P</mml:mi><mml:mi>V</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>N</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>N</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>N</mml:mi></mml:mrow></mml:mfrac></mml:math></disp-formula></p>
<p><bold>F-Score</bold></p>
<p>It is the harmonic mean of precision (PPV) and sensitivity (TPR), calculated by using
<disp-formula id="eqn-18"><label>(18)</label><mml:math id="mml-eqn-18" display="block"><mml:mi>F</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>S</mml:mi><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mo>=</mml:mo><mml:mn>2</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mfrac><mml:mrow><mml:mi>P</mml:mi><mml:mi>P</mml:mi><mml:mi>V</mml:mi><mml:mo>&#x00D7;</mml:mo><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mi>R</mml:mi></mml:mrow><mml:mrow><mml:mi>P</mml:mi><mml:mi>P</mml:mi><mml:mi>V</mml:mi><mml:mo>+</mml:mo><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mi>R</mml:mi></mml:mrow></mml:mfrac></mml:math></disp-formula></p>
<p><xref ref-type="table" rid="table-2">Tab. 2</xref> shows the performance metric measures of feature extraction.</p>
<table-wrap id="table-2"><label>Table 2</label><caption><title>Performance metric measures of feature extraction</title></caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">Algorithm</th>
<th align="left">Sensitivity</th>
<th align="left">Specificity</th>
<th align="left">PPV</th>
<th align="left">NPV</th>
<th align="left">F-score</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">PCA</td>
<td align="left">84.4&#x0025;</td>
<td align="left">77.2&#x0025;</td>
<td align="left">74.6&#x0025;</td>
<td align="left">81.3&#x0025;</td>
<td align="left">75.1&#x0025;</td>
</tr>
<tr>
<td align="left">MWOA</td>
<td align="left">95.2&#x0025;</td>
<td align="left">89.5&#x0025;</td>
<td align="left">82.1&#x0025;</td>
<td align="left">85.2&#x0025;</td>
<td align="left">83.2&#x0025;</td>
</tr>
<tr>
<td align="left">MWOA-SSA</td>
<td align="left">97.8&#x0025;</td>
<td align="left">91.7&#x0025;</td>
<td align="left">88.3&#x0025;</td>
<td align="left">93.6&#x0025;</td>
<td align="left">96.4&#x0025;</td>
</tr>
</tbody>
</table>
</table-wrap>
<p><xref ref-type="table" rid="table-2">Tab. 2</xref> shows that for the sensitivity rate, MWOA-SSA (97.8&#x0025;) is better than PCA (84.4&#x0025;) and MWOA (95.2&#x0025;). MWOA-SSA outperforms the other algorithms with a specificity of 91.7&#x0025;. For PPV, MWOA-SSA has a percentage of 88.3&#x0025;. For NPV, MWOA-SSA has 93.6&#x0025;. MWOA-SSA outperforms the other algorithms with an F-score of 96.4&#x0025;.</p>
<sec id="s4_1"><label>4.1</label><title>Feature Selection</title>
<p>MWOA-SSA is used for feature selection, and it is compared with existing algorithms of PCA and MWOA in terms of average fitness, average error, best fitness, mean, standard deviation, and worst fitness. The parameter values for the fitness function are 0.97 and 0.03.</p>
<p><bold>Average Error</bold></p>
<p>It shows the classifier&#x0027;s accuracy for the feature selection for the COVID-19 dataset, and it is calculated by using
<disp-formula id="eqn-19"><label>(19)</label><mml:math id="mml-eqn-19" display="block"><mml:mi>A</mml:mi><mml:mi>v</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>g</mml:mi><mml:mi>e</mml:mi><mml:mi>E</mml:mi><mml:mi>r</mml:mi><mml:mi>r</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>R</mml:mi></mml:mfrac><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>R</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>S</mml:mi></mml:mfrac><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>N</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mi>C</mml:mi><mml:mi>o</mml:mi><mml:mi>m</mml:mi><mml:mi>p</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>C</mml:mi><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mi>l</mml:mi><mml:mrow><mml:msub><mml:mi>b</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>where <inline-formula id="ieqn-80"><mml:math id="mml-ieqn-80"><mml:mi>C</mml:mi><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> is classifier&#x0027;s label for the pixel <italic>i</italic> and <inline-formula id="ieqn-81"><mml:math id="mml-ieqn-81"><mml:mi>l</mml:mi><mml:mrow><mml:msub><mml:mi>b</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> is the class label for the pixel <italic>i</italic> of the image and <inline-formula id="ieqn-82"><mml:math id="mml-ieqn-82"><mml:mi>C</mml:mi><mml:mi>o</mml:mi><mml:mi>m</mml:mi><mml:mi>p</mml:mi></mml:math></inline-formula> calculates the matching between two inputs.</p>
<p><bold>Mean</bold>
<disp-formula id="eqn-20"><label>(20)</label><mml:math id="mml-eqn-20" display="block"><mml:mi>M</mml:mi><mml:mi>e</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>R</mml:mi></mml:mfrac><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>M</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mi>m</mml:mi><mml:msubsup><mml:mi>e</mml:mi><mml:mi>j</mml:mi><mml:mo>&#x2217;</mml:mo></mml:msubsup></mml:math></disp-formula></p>
<p><bold>Standard Deviation</bold>
<disp-formula id="eqn-21"><label>(21)</label><mml:math id="mml-eqn-21" display="block"><mml:mi>S</mml:mi><mml:mi>D</mml:mi><mml:mo>=</mml:mo><mml:msqrt><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:mfrac><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>M</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:msubsup><mml:mi>e</mml:mi><mml:mi>j</mml:mi><mml:mo>&#x2217;</mml:mo></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:msqrt></mml:math></disp-formula>where mean is obtained from <xref ref-type="disp-formula" rid="eqn-20">Eq. (20)</xref>.</p>
<p><bold>Best Fitness</bold></p>
<p>It calculates the minimum function of fitness, and it is calculated as
<disp-formula id="eqn-22"><label>(22)</label><mml:math id="mml-eqn-22" display="block"><mml:mi>B</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi><mml:mi>f</mml:mi><mml:mi>i</mml:mi><mml:mrow><mml:msub><mml:mi>t</mml:mi><mml:mi>n</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mi>M</mml:mi><mml:mi>i</mml:mi><mml:msubsup><mml:mi>n</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>M</mml:mi></mml:msubsup><mml:mi>m</mml:mi><mml:msubsup><mml:mi>e</mml:mi><mml:mi>j</mml:mi><mml:mo>&#x2217;</mml:mo></mml:msubsup></mml:math></disp-formula></p>
<p><bold>Average Fitness</bold></p>
<p>The average size of features in the COVID-19 dataset is calculated as
<disp-formula id="eqn-23"><label>(23)</label><mml:math id="mml-eqn-23" display="block"><mml:mi>A</mml:mi><mml:mi>v</mml:mi><mml:mi>g</mml:mi><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>z</mml:mi><mml:mi>e</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>M</mml:mi></mml:mfrac><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>M</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mfrac><mml:mrow><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>z</mml:mi><mml:mi>e</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:msubsup><mml:mi>e</mml:mi><mml:mi>j</mml:mi><mml:mo>&#x2217;</mml:mo></mml:msubsup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mi>D</mml:mi></mml:mfrac></mml:math></disp-formula></p>
<p><bold>Worst Fitness</bold></p>
<p>The worst solution of fitness is calculated as
<disp-formula id="eqn-24"><label>(24)</label><mml:math id="mml-eqn-24" display="block"><mml:mi>w</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi><mml:mi>f</mml:mi><mml:mi>i</mml:mi><mml:mrow><mml:msub><mml:mi>t</mml:mi><mml:mi>n</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mi>M</mml:mi><mml:mi>a</mml:mi><mml:msubsup><mml:mi>x</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>M</mml:mi></mml:msubsup><mml:mi>m</mml:mi><mml:msubsup><mml:mi>e</mml:mi><mml:mi>j</mml:mi><mml:mo>&#x2217;</mml:mo></mml:msubsup></mml:math></disp-formula></p>
<p><xref ref-type="table" rid="table-3">Tab. 3</xref> shows the performance of the proposed algorithm in feature selection</p>
<table-wrap id="table-3"><label>Table 3</label><caption><title>Performance metric measures for feature selection</title></caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">Optimizer</th>
<th align="left">PCA</th>
<th align="left">MWOA</th>
<th align="left">MWOA-SSA</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">Average error</td>
<td align="left">0.1652</td>
<td align="left">0.1547</td>
<td align="left">0.1114</td>
</tr>
<tr>
<td align="left">Average select size</td>
<td align="left">0.3234</td>
<td align="left">0.3548</td>
<td align="left">0.0715</td>
</tr>
<tr>
<td align="left">Mean</td>
<td align="left">0.3452</td>
<td align="left">0.4134</td>
<td align="left">0.1573</td>
</tr>
<tr>
<td align="left">Standard deviation</td>
<td align="left">0.0367</td>
<td align="left">0.0678</td>
<td align="left">0.0123</td>
</tr>
<tr>
<td align="left">Best fitness</td>
<td align="left">0.1264</td>
<td align="left">0.1598</td>
<td align="left">0.1034</td>
</tr>
<tr>
<td align="left">Worst fitness</td>
<td align="left">0.2763</td>
<td align="left">0.2356</td>
<td align="left">0.2115</td>
</tr>
<tr>
<td align="left">Average fitness</td>
<td align="left">0.2287</td>
<td align="left">0.2419</td>
<td align="left">0.2034</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The results of the proposed MWOA-SSA algorithm in <xref ref-type="table" rid="table-3">Tab. 3</xref> show the lower error and select features from the COVID-19 dataset. The MWOA-SSA algorithm achieved the minimum average error of 0.1114 in selecting the features of infected lung images. The minimum errors for PCA, MWOA, and MWOA-SSA are used to select the features from best fitness to worst fitness. The proposed algorithm MWOA-SSA outperforms other existing algorithms, and the best fitness value is 0.1034, the worst fitness value is 0.2115, and the average fitness value is 0.2034.</p>
</sec>
<sec id="s4_2"><label>4.2</label><title>Detection and Severity Classification of COVID-19</title>
<p>To detect the infection severity, lung images have been examined by using ground truths of CT0&#x2013;CT4 as given below. <xref ref-type="table" rid="table-4">Tab. 4</xref> presents the severity levels in the lungs.</p>
<table-wrap id="table-4"><label>Table 4</label><caption><title>Severity levels for infection in lungs [<xref ref-type="bibr" rid="ref-32">32</xref>,<xref ref-type="bibr" rid="ref-33">33</xref>]</title></caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">Class</th>
<th align="left">Infection in &#x0025;</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">Healthy</td>
<td align="left">0</td>
</tr>
<tr>
<td align="left">Mild</td>
<td align="left">1&#xA0;-&#xA0;25%</td>
</tr>
<tr>
<td align="left">Moderate</td>
<td align="left">26&#xA0;-&#xA0;50%</td>
</tr>
<tr>
<td align="left">Severe</td>
<td align="left">51&#xA0;-&#xA0;75%</td>
</tr>
<tr>
<td align="left">Critical</td>
<td align="left">76&#xA0;-&#xA0;100%</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>In this work, we collected data on 500 patients with COVID-19 infection. Infection was confirmed by a nasopharyngeal swab using a U-TOP COVID-19 Detection Kit. Age, gender, d-dimer, ferritin levels, C-reactive protein test (CRP), and O2 were collected. Patient&#x0027;s age was classified into &#x003C;20, 21&#x2013;40, 41&#x2013;49, 50&#x2013;60, 61&#x2013;70, and &#x003E;70 years. The correlation <inline-formula id="ieqn-83"><mml:math id="mml-ieqn-83"><mml:mo stretchy="false">(</mml:mo><mml:mi>p</mml:mi><mml:mo>&#x003C;</mml:mo><mml:mn>0.05</mml:mn><mml:mo stretchy="false">)</mml:mo><mml:mspace width="thickmathspace" /></mml:math></inline-formula> between CT severity score was used to detect lung infection. <xref ref-type="table" rid="table-5">Tab. 5</xref> shows a survey of 500 patients who are affected by pneumonia. <xref ref-type="fig" rid="fig-3">Fig. 3</xref> shows the CT severity of COVID-19 patients.</p>
<table-wrap id="table-5"><label>Table 5</label><caption><title>Demographic data of 500 patients</title></caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">Age in years</th>
<th align="left">Male (300 patients)</th>
<th align="left">Female (200 patients)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">5 to 20</td>
<td align="left">53</td>
<td align="left">40</td>
</tr>
<tr>
<td align="left">21 to 40</td>
<td align="left">87</td>
<td align="left">35</td>
</tr>
<tr>
<td align="left">41 to 60</td>
<td align="left">65</td>
<td align="left">45</td>
</tr>
<tr>
<td align="left">61 to 70</td>
<td align="left">35</td>
<td align="left">35</td>
</tr>
<tr>
<td align="left">More than 71 years</td>
<td align="left">60</td>
<td align="left">45</td>
</tr>
</tbody>
</table>
</table-wrap>
<p><xref ref-type="fig" rid="fig-3">Fig. 3</xref> shows that negative disease was mainly seen in the age group of 21 to 40 (30&#x0025;), and mild lung infection was mainly seen in the 41 to 60 age group (60&#x0025;). Moderate lung infection was mainly seen in the 61 to 70 age group (68&#x0025;), and severe lung infection was mainly seen in the age group of 41 to 60 (70&#x0025;). This is the highest risk factor for COVID-19 affected patients [<xref ref-type="bibr" rid="ref-34">34</xref>&#x2013;<xref ref-type="bibr" rid="ref-37">37</xref>]. <xref ref-type="fig" rid="fig-4">Fig. 4</xref> shows the time taken for the classification of COVID-19 affected cases and non-COVID-19 cases from the large dataset.</p>
<fig id="fig-3"><label>Figure 3</label><caption><title>CT-COVID severity score</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_23418-fig-3.png"/></fig>
<fig id="fig-4"><label>Figure 4</label><caption><title>Execution time (proposed method executes faster than PCA and MWOA)</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_23418-fig-4.png"/></fig>
</sec>
</sec>
<sec id="s5"><label>5</label><title>Conclusion</title>
<p>MWOA-SSA is used for the classification of COVID-19 cases in four phases. In the first phase, to classify accurate COVID-19 and non-COVID-19 images from a large dataset, pre-processing work has been done using a median filter. Features are extracted for the training CT images by PCA. For the feature selection of CT lung images, MWOA is implemented. For the selected features of the CT image, MWOA-SSA is implemented to classify the COVID-19 and non-COVID-19 images from the large dataset. This paper also proposes detecting and identifying the severity of lung infection by using different severity levels of COVID-19 cases. The main advantage of MWOA-SSA is that it efficiently and quickly classifies COVID-19 and non-COVID-19 cases and detects severity of lung infection using severity levels. MWOA-SSA has an accuracy of 97&#x0025;, whereas PCA and MWOA have accuracies of 81&#x0025; and 86&#x0025;. In future work, we suggest the use of various deep learning algorithms and various modalities of images and clinical reports.</p>
</sec>
</body>
<back>
<fn-group>
<fn fn-type="other"><p><bold>Funding Statement:</bold> The authors received no specific funding for this study.</p></fn>
<fn fn-type="conflict"><p><bold>Conflicts of Interest:</bold> The authors declare that they have no conflicts of interest to report regarding the present study.</p></fn>
</fn-group>
<ref-list content-type="authoryear">
<title>References</title>
<ref id="ref-1"><label>[1]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Q. V.</given-names> <surname>Pham</surname></string-name>, <string-name><given-names>D. C.</given-names> <surname>Nguyen</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Huynh</surname></string-name>, <string-name><given-names>W. J.</given-names> <surname>Hwang</surname></string-name> and <string-name><given-names>P. N.</given-names> <surname>Pathirana</surname></string-name></person-group>, &#x201C;<article-title>Artificial Intelligence (AI) and big data for coronavirus (COVID-19) pandemic: A survey on the state-of-the-arts</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>8</volume>, pp. <fpage>130820</fpage>&#x2013;<lpage>130839</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-2"><label>[2]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>X. H.</given-names> <surname>yuen Frank Wong</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Yin Sonia Lam</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Ho Tung Fong</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Leung</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Wing Yan Chin</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Frequency and distribution of chest radiographic findings in patients positive for COVID-19</article-title>,&#x201D; <source>Radiology</source>, vol. <volume>296</volume>, no. <issue>2</issue>, pp. <fpage>E72</fpage>&#x2013;<lpage>E78</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-3"><label>[3]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>C.</given-names> <surname>Sanghoon</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Sunho</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Changhwan</surname></string-name>, <string-name><given-names>W.</given-names> <surname>Sunhee</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Taejin</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Enhancement of soft-tissue contrast in cone-beam CT using an anti-scatter grid with a sparse sampling approach</article-title>,&#x201D; <source>Physics in Medicine and Biology</source>, vol. <volume>7</volume>, pp. <fpage>1</fpage>&#x2013;<lpage>9</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-4"><label>[4]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>B. J.</given-names> <surname>Walker</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Radtke</surname></string-name>, <string-name><given-names>G. H.</given-names> <surname>Chen</surname></string-name>, <string-name><given-names>K. W.</given-names> <surname>Eliceiri</surname></string-name> and <string-name><given-names>T. R.</given-names> <surname>Mackie</surname></string-name></person-group>, &#x201C;<article-title>A beam optics study of a modular multi-source X-ray tube for novel computed tomography applications</article-title>,&#x201D; <source>Nuclear Instruments and Methods in Physics Research Section A: Accelerators, Spectrometers, Detectors and Associated Equipment</source>, vol. <volume>868</volume>, pp. <fpage>1</fpage>&#x2013;<lpage>9</lpage>, <year>2017</year>.</mixed-citation></ref>
<ref id="ref-5"><label>[5]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>L. T.</given-names> <surname>Campos</surname></string-name>, <string-name><given-names>F. M.</given-names> <surname>Jesus</surname></string-name>, <string-name><given-names>E. A.</given-names> <surname>De Souza Gon&#x00E7;alves</surname></string-name> and <string-name><given-names>L. A. G.</given-names> <surname>Magalh&#x00E3;es</surname></string-name></person-group>, &#x201C;<article-title>Computed tomography x-ray characterization: A monte carlo study</article-title>,&#x201D; <source>Radiation Physics and Chemistry</source>, vol. <volume>167</volume>, <comment>Article ID 108359</comment>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-6"><label>[6]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M. K.</given-names> <surname>Honkanen</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Matikka</surname></string-name>, <string-name><given-names>J. T.</given-names> <surname>Honkanen</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Bhattarai</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Grinstaff</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Imaging of proteoglycan and water contents in human articular cartilage with fullbody CT using dual contrast technique</article-title>,&#x201D; <source>Journal of Orthopaedic Research</source>, vol. <volume>37</volume>, no. <issue>5</issue>, pp. <fpage>1059</fpage>&#x2013;<lpage>1070</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-7"><label>[7]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>E.</given-names> <surname>Montagnon</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Cerny</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Cadrin ch&#x00E1;nevert</surname></string-name>, <string-name><given-names>V.</given-names> <surname>Hamilton</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Derennes</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Deep learning workflow in radiology: A primer</article-title>,&#x201D; <source>Insights into Imaging</source>, vol. <volume>11</volume>, no. <issue>1</issue>, Article Number 22, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-8"><label>[8]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Ibrahim</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Mohammed</surname></string-name>, <string-name><given-names>H. A.</given-names> <surname>Ali</surname></string-name> and <string-name><given-names>S. E.</given-names> <surname>Hussein</surname></string-name></person-group>, &#x201C;<article-title>Breast cancer segmentation from thermal images based on chaotic salp swarm algorithm</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>8</volume>, no. <issue>1</issue>, pp. <fpage>122121</fpage>&#x2013;<lpage>122134</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-9"><label>[9]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M. A. A.</given-names> <surname>Qaness</surname></string-name>, <string-name><given-names>A. A.</given-names> <surname>Ewees</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Fan</surname></string-name> and <string-name><given-names>M.</given-names> <surname>Abd</surname></string-name></person-group>, &#x201C;<article-title>Optimization method for forecasting confirmed cases of COVID-19 in China</article-title>,&#x201D; <source>Journal of Clinical Medicine</source>, vol. <volume>9</volume>, no. <issue>3</issue>, Article Number 674, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-10"><label>[10]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Li</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Fang</surname></string-name>, <string-name><given-names>W.</given-names> <surname>Li</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Pan</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Qin</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>CT image visual quantitative evaluation and clinical classification of coronavirus disease (COVID-19)</article-title>,&#x201D; <source>European Radiology</source>, vol. <volume>30</volume>, no. <issue>8</issue>, pp. <fpage>4407</fpage>&#x2013;<lpage>4416</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-11"><label>[11]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Chung</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Bernheim</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Mei</surname></string-name>, <string-name><given-names>N.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Huang</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>CT imaging features of 2019 novel coronavirus (2019-nCoV)</article-title>,&#x201D;<source>Radiology</source>, vol. <volume>295</volume>, no. <issue>1</issue>, pp. <fpage>202</fpage>&#x2013;<lpage>207</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-12"><label>[12]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>R.</given-names> <surname>Yang</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Li</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Liu</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Zhen</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Zhang</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Chest CT severity score: An imaging tool for assessing severe COVID-19</article-title>,&#x201D; <source>Radiology Cardiothoracic Imaging</source>, vol. <volume>2</volume>, no. <issue>2</issue>, pp. <fpage>e200047</fpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-13"><label>[13]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>X.</given-names> <surname>Wu</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Hui</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Niu</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Li</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Wang</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Deep learning-based multi-view fusion model for screening 2019 novel coronavirus pneumonia: A multicentre study</article-title>,&#x201D; <source>European Journal of Radiology</source>, vol. <volume>128</volume>, Article ID 109041, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-14"><label>[14]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. A.</given-names> <surname>Ardakani</surname></string-name>, <string-name><given-names>A. R.</given-names> <surname>Kanafi</surname></string-name>, <string-name><given-names>U. R.</given-names> <surname>Acharya</surname></string-name>, <string-name><given-names>N.</given-names> <surname>Khadem</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Mohammadi</surname></string-name></person-group>, &#x201C;<article-title>Application of deep learning technique to manage COVID-19 in routine clinical practice using CT images: Results of 10 convolutional neural networks</article-title>,&#x201D; <source>Computers in Biology and Medicine</source>, vol. <volume>121</volume>, Article ID 103795, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-15"><label>[15]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M. A.</given-names> <surname>Elaziz</surname></string-name>, <string-name><given-names>K. M.</given-names> <surname>Hosny</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Salah</surname></string-name>, <string-name><given-names>M. M.</given-names> <surname>Darwish</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Lu</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>New machine learning method for image-based diagnosisof COVID-19</article-title>,&#x201D; <source>PLoS ONE</source>, vol. <volume>15</volume>, no. <issue>6</issue>, p. <fpage>e0235187</fpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-16"><label>[16]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Hu</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Gao</surname></string-name>, <string-name><given-names>Z.</given-names> <surname>Niu</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Jiang</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Li</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Weakly supervised deep learning for COVID-19 infection detection and classification from CT images</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>8</volume>, pp. <fpage>118869</fpage>&#x2013;<lpage>118883</lpage>. <year>2020</year>.</mixed-citation></ref>
<ref id="ref-17"><label>[17]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Nour</surname></string-name>, <string-name><given-names>Z.</given-names> <surname>C&#x00F6;mert</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Polat</surname></string-name></person-group>, &#x201C;<article-title>A novel medical diagnosis model for COVID-19 infection detection based on deep features and Bayesian optimization</article-title>,&#x201D; <source>Applied Soft Computing</source>, vol. <volume>97</volume>, Part A, p. <fpage>106580</fpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-18"><label>[18]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Hu</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Gao</surname></string-name>, <string-name><given-names>Z.</given-names> <surname>Niu</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Jiang</surname></string-name> and <string-name><given-names>L.</given-names> <surname>Li</surname></string-name></person-group>, &#x201C;<article-title>Weakly supervised deep learning for COVID-19 infection detection and classification from CT images</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>8</volume>, pp. <fpage>118869</fpage>&#x2013;<lpage>118883</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-19"><label>[19]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Liu</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Shen</surname></string-name>, <string-name><given-names>Z.</given-names> <surname>Li</surname></string-name> and <string-name><given-names>Y.</given-names> <surname>Sang</surname></string-name></person-group>, &#x201C;<article-title>Clinically applicable AI system for accurate diagnosis, quantitative measurements, and prognosis of COVID-19 pneumonia using computed tomography</article-title>,&#x201D; <source>Cell</source>, vol. <volume>181</volume>, no. <issue>6</issue>, pp. <fpage>1423</fpage>&#x2013;<lpage>1433</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-20"><label>[20]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>H.</given-names> <surname>Panwar</surname></string-name>, <string-name><given-names>P. K.</given-names> <surname>Gupta</surname></string-name>, <string-name><given-names>M. K.</given-names> <surname>Siddiqui</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Morales Menendez</surname></string-name> and <string-name><given-names>V.</given-names> <surname>Singh</surname></string-name></person-group>, &#x201C;<article-title>Application of deep learning for fast detection of COVID-19 in X-rays using nCOVnet</article-title>,&#x201D; <source>Chaos, Solitons Fractals</source>, vol. <volume>138</volume>, Article ID 109944, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-21"><label>[21]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>X.</given-names> <surname>Xu</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Jiang</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Ma</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Du</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Li</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>A deep learning system to screen novel coronavirus disease 2019 pneumonia</article-title>,&#x201D; <source>Engineering</source>, vol. <volume>6</volume>, no. <issue>10</issue>, pp. <fpage>1122</fpage>&#x2013;<lpage>1129</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-22"><label>[22]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Q.</given-names> <surname>AlTashi</surname></string-name>, <string-name><given-names>S. J.</given-names> <surname>Abdul Kadir</surname></string-name>, <string-name><given-names>H. M.</given-names> <surname>Rais</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Mirjalili</surname></string-name> and <string-name><given-names>H.</given-names> <surname>Alhussian</surname></string-name></person-group>, &#x201C;<article-title>Binary optimization using hybrid grey wolf optimization for feature selection</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>7</volume>, pp. <fpage>39496</fpage>&#x2013;<lpage>39508</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-23"><label>[23]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Ye</surname></string-name>, <string-name><given-names>Q.</given-names> <surname>Zhu</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Li</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Lu</surname></string-name> and <string-name><given-names>H.</given-names> <surname>Yuan</surname></string-name></person-group>, &#x201C;<article-title>A feasibility study of pulmonary nodule detection by ultralow-dose CT with adaptive statisticaliterative reconstructionV technique</article-title>,&#x201D; <source>European Journal of Radiology</source>, vol. <volume>119</volume>, Article ID 108652, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-24"><label>[24]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>R.</given-names> <surname>Yamashita</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Nishio</surname></string-name>, <string-name><given-names>R. K. G.</given-names> <surname>Do</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Togashi</surname></string-name></person-group>, &#x201C;<article-title>Convolutional neural networks: An overview and application in radiology</article-title>,&#x201D; <source>Insights into Image Processing</source>, vol. <volume>9</volume>, no. <issue>4</issue>, pp. <fpage>611</fpage>&#x2013;<lpage>629</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-25"><label>[25]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Fu</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>W.</given-names> <surname>Guo</surname></string-name> and <string-name><given-names>P.</given-names> <surname>Peng</surname></string-name></person-group>, &#x201C;<article-title>Multi-mounted X-Ray beam computed tomography</article-title>,&#x201D; <source>Nuclear Instruments and Methods in Physics Research</source>, vol. <volume>888</volume>, pp. <fpage>119</fpage>&#x2013;<lpage>125</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-26"><label>[26]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>B. J.</given-names> <surname>Walker</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Radtke</surname></string-name>, <string-name><given-names>G. H.</given-names> <surname>Chen</surname></string-name>, <string-name><given-names>K. W.</given-names> <surname>Eliceiri</surname></string-name> and <string-name><given-names>T. R.</given-names> <surname>Mackie</surname></string-name></person-group>, &#x201C;<article-title>A beam optics study of a modular multi-source X-ray tube for novel computed tomography applications</article-title>,&#x201D; <source>Nuclear Instruments and Methods in Physics Research</source>, vol. <volume>868</volume>, pp. <fpage>1</fpage>&#x2013;<lpage>9</lpage>, <year>2017</year>.</mixed-citation></ref>
<ref id="ref-27"><label>[27]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Mirjalili</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Lewis</surname></string-name></person-group>, &#x201C;<article-title>The whale optimization algorithm</article-title>,&#x201D; <source>Advances in Engineering Software</source>, vol. <volume>95</volume>, pp. <fpage>51</fpage>&#x2013;<lpage>67</lpage>, <year>2016</year>.</mixed-citation></ref>
<ref id="ref-28"><label>[28]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Mirjalili</surname></string-name>, <string-name><given-names>S. M.</given-names> <surname>Mirjalili</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Saremi</surname></string-name> and <string-name><given-names>S.</given-names> <surname>Mirjalili</surname></string-name></person-group>, &#x201C;<article-title>Whale optim. algorithm: Theory, literature review, and application in designing photonic crystal filters</article-title>,&#x201D; <source>Studies in Computational Intelligence</source>, vol. <volume>811</volume>, Springer, pp. <fpage>219</fpage>&#x2013;<lpage>238</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-29"><label>[29]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>E. S. E.</given-names> <surname>Kenawy</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Ibrahim</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Mirjalili</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Eid</surname></string-name> and <string-name><given-names>M. M.</given-names> <surname>Hussein</surname></string-name></person-group>, &#x201C;<article-title>Novel feature selection and voting classifier algorithms for COVID-19 classification in CT images</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>8</volume>, pp. <fpage>179317</fpage>&#x2013;<lpage>179335</lpage>,2020.</mixed-citation></ref>
<ref id="ref-30"><label>[30]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Z. M.</given-names> <surname>Yaseen</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Faris</surname></string-name> and <string-name><given-names>N.</given-names> <surname>Al-Ansari</surname></string-name></person-group>, &#x201C;<article-title>Hybridized extreme learning machine model with salp swarm algorithm: A novel predictive model for hydrological application</article-title>,&#x201D; <source>Complexity</source>, vol. <volume>2020</volume>, Article ID 8206245, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-31"><label>[31]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>G. A.</given-names> <surname>Saeed</surname></string-name>, <string-name><given-names>W.</given-names> <surname>Gaba</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Shah</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Al Helali</surname></string-name>, <string-name><given-names>E.</given-names> <surname>Raidullah</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Correlation between chest CT severity scores and the clinical parameters of adult patients with COVID-19 pneumonia</article-title>,&#x201D; <source>Radiology Research and Practice</source>, vol. <volume>2021</volume>, Article ID 6697677, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-32"><label>[32]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Qiblawey</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Tahir</surname></string-name>, <string-name><given-names>M. E.</given-names> <surname>Chowdhury</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Khandakar</surname></string-name> and <string-name><given-names>S.</given-names> <surname>Kiranyaz</surname></string-name></person-group>, &#x201C;<article-title>Detection and severity classification of COVID-19 in CT images using deep learning</article-title>,&#x201D; <source>Diagnostics</source>, vol. <volume>11</volume>, no. <issue>5</issue>, Article Number 893, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-33"><label>[33]</label><mixed-citation publication-type="other"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Zhao</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>X.</given-names> <surname>He</surname></string-name> and <string-name><given-names>P.</given-names> <surname>Xie</surname></string-name></person-group>, &#x201C;<article-title>COVID-CT-dataset: A CT scan dataset about COVID-19</article-title>,&#x201D; <italic>arXiv preprint arXiv: 2003.13865</italic>, vol. <volume>490</volume>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-34"><label>[34]</label><mixed-citation publication-type="book"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Kukan</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Gokul</surname></string-name>, <string-name><given-names>S.S.</given-names> <surname>Vishnu Priyan</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Barathi Kanna</surname></string-name> and <string-name><given-names>E.</given-names> <surname>Prabhu</surname></string-name></person-group>, &#x201C;<chapter-title>COVID-19: Smart shop surveillance system</chapter-title>,&#x201D; in <source>the Intelligent Sustainable Systems</source>, <edition>1st ed.</edition>, vol. <volume>1</volume>. <publisher-loc>Henderson, Singapore</publisher-loc>: <publisher-name>Springer</publisher-name>. <year>2022</year>.</mixed-citation></ref>
<ref id="ref-35"><label>[35]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Aruul Mozhi Varman</surname></string-name>, <string-name><given-names>A. R.</given-names> <surname>Baskaran</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Aravindh</surname></string-name> and <string-name><given-names>E.</given-names> <surname>Prabhu</surname></string-name></person-group>, &#x201C;<article-title>Deep learning and IoT for smart agriculture using WSN</article-title>,&#x201D; in <conf-name>Proc. ICCIC</conf-name>, <conf-loc>Coimbatore, India</conf-loc>, pp. <fpage>1</fpage>&#x2013;<lpage>6</lpage>, <year>2017</year>.</mixed-citation></ref>
<ref id="ref-36"><label>[36]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Kanakaprabha</surname></string-name> and <string-name><given-names>D.</given-names> <surname>Radha</surname></string-name></person-group>, &#x201C;<article-title>Analysis of COVID-19 and pneumonia detection in chest X-ray images using deep learning</article-title>,&#x201D; in <conf-name>Proc. ICCISC</conf-name>, <conf-loc>Idukki, India</conf-loc>, pp. <fpage>1</fpage>&#x2013;<lpage>6</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-37"><label>[37]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>H.</given-names> <surname>Sathyan</surname></string-name> and <string-name><given-names>J. V.</given-names> <surname>Panicker</surname></string-name></person-group>, &#x201C;<article-title>Lung nodule classification using deep convnets on CT images</article-title>,&#x201D; in <conf-name>Proc. ICCCNT</conf-name>, <conf-loc>Bengaluru, India</conf-loc>, pp. <fpage>1</fpage>&#x2013;<lpage>5</lpage>, <year>2018</year>.</mixed-citation></ref>
</ref-list>
</back>
</article>