<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "http://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="1.1">
<front>
<journal-meta>
<journal-id journal-id-type="pmc">CMC</journal-id>
<journal-id journal-id-type="nlm-ta">CMC</journal-id>
<journal-id journal-id-type="publisher-id">CMC</journal-id>
<journal-title-group>
<journal-title>Computers, Materials &#x0026; Continua</journal-title>
</journal-title-group>
<issn pub-type="epub">1546-2226</issn>
<issn pub-type="ppub">1546-2218</issn>
<publisher>
<publisher-name>Tech Science Press</publisher-name>
<publisher-loc>USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">24312</article-id>
<article-id pub-id-type="doi">10.32604/cmc.2022.024312</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Article</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Optimal Hybrid Feature Extraction with Deep Learning for COVID-19 Classifications</article-title>
<alt-title alt-title-type="left-running-head">Optimal Hybrid Feature Extraction with Deep Learning for COVID-19 Classifications</alt-title>
<alt-title alt-title-type="right-running-head">Optimal Hybrid Feature Extraction with Deep Learning for COVID-19 Classifications</alt-title>
</title-group>
<contrib-group content-type="authors">
<contrib id="author-1" contrib-type="author">
<name name-style="western"><surname>Eltahir</surname><given-names>Majdy M.</given-names></name><xref ref-type="aff" rid="aff-1">1</xref>
</contrib>
<contrib id="author-2" contrib-type="author">
<name name-style="western"><surname>Abunadi</surname><given-names>Ibrahim</given-names></name><xref ref-type="aff" rid="aff-2">2</xref>
</contrib>
<contrib id="author-3" contrib-type="author">
<name name-style="western"><surname>Al-Wesabi</surname><given-names>Fahd N.</given-names></name><xref ref-type="aff" rid="aff-3">3</xref>
<xref ref-type="aff" rid="aff-4">4</xref>
</contrib>
<contrib id="author-4" contrib-type="author">
<name name-style="western"><surname>Hilal</surname><given-names>Anwer Mustafa</given-names></name><xref ref-type="aff" rid="aff-5">5</xref>
</contrib>
<contrib id="author-5" contrib-type="author">
<name name-style="western"><surname>Yousif</surname><given-names>Adil</given-names></name><xref ref-type="aff" rid="aff-6">6</xref>
</contrib>
<contrib id="author-6" contrib-type="author">
<name name-style="western"><surname>Motwakel</surname><given-names>Abdelwahed</given-names></name><xref ref-type="aff" rid="aff-5">5</xref>
</contrib>
<contrib id="author-7" contrib-type="author">
<name name-style="western"><surname>Duhayyim</surname><given-names>Mesfer Al</given-names></name><xref ref-type="aff" rid="aff-7">7</xref>
</contrib>
<contrib id="author-8" contrib-type="author" corresp="yes">
<name name-style="western"><surname>Hamza</surname><given-names>Manar Ahmed</given-names></name><xref ref-type="aff" rid="aff-5">5</xref><email>ma.hamza@psau.edu.sa</email>
</contrib>
<aff id="aff-1"><label>1</label><institution>Department of Information Systems, College of Science &#x0026; Art at Mahayil, King Khalid University</institution>, <country>Saudi Arabia</country></aff>
<aff id="aff-2"><label>2</label><institution>Department of Information Systems, Prince Sultan University</institution>, <addr-line>Riyadh, 11586</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-3"><label>3</label><institution>Department of Computer Science, King Khalid University</institution>, <addr-line>Muhayel Aseer, 62529</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-4"><label>4</label><institution>Faculty of Computer and IT, Sana&#x0027;a University</institution>, <addr-line>Sana&#x0027;a, 61101</addr-line>, <country>Yemen</country></aff>
<aff id="aff-5"><label>5</label><institution>Department of Computer and Self Development, Preparatory Year Deanship, Prince Sattam bin Abdulaziz University</institution>, <addr-line>AlKharj</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-6"><label>6</label><institution>Faculty of Arts and Science, Najran University</institution>, <addr-line>Sharourah</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-7"><label>7</label><institution>Department of Natural and Applied Sciences, College of Community-Aflaj, Prince Sattam bin Abdulaziz University</institution>, <country>Saudi Arabia</country></aff>
</contrib-group>
<author-notes>
<corresp id="cor1"><label>&#x002A;</label>Corresponding Author: Manar Ahmed Hamza. Email: <email>ma.hamza@psau.edu.sa</email></corresp>
</author-notes>
<pub-date pub-type="epub" date-type="pub" iso-8601-date="2022-01-13"><day>13</day>
<month>01</month>
<year>2022</year></pub-date>
<volume>71</volume>
<issue>3</issue>
<fpage>6257</fpage>
<lpage>6273</lpage>
<history>
<date date-type="received"><day>13</day><month>10</month><year>2021</year></date>
<date date-type="accepted"><day>16</day><month>12</month><year>2021</year></date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2022 Eltahir et al.</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Eltahir et al.</copyright-holder>
<license xlink:href="https://creativecommons.org/licenses/by/4.0/">
<license-p>This work is licensed under a <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="TSP_CMC_24312.pdf"></self-uri>
<abstract>
<p>Novel coronavirus 2019 (COVID-19) has affected the people&#x0027;s health, their lifestyle and economical status across the globe. The application of advanced Artificial Intelligence (AI) methods in combination with radiological imaging is useful in accurate detection of the disease. It also assists the physicians to take care of remote villages too. The current research paper proposes a novel automated COVID-19 analysis method with the help of Optimal Hybrid Feature Extraction (OHFE) and Optimal Deep Neural Network (ODNN) called OHFE-ODNN from chest x-ray images. The objective of the presented technique is to perform binary and multi-class classification of COVID-19 analysis from chest X-ray image. The presented OHFE-ODNN method includes a sequence of procedures such as Median Filtering (MF)-based pre-processing, feature extraction and finally, binary (COVID/Non-COVID) and multiclass (Normal, COVID, SARS) classification. Besides, in OHFE-based feature extraction, Gray Level Co-occurrence Matrix (GLCM) and Histogram of Gradients (HOG) are integrated together. The presented OHFE-ODNN model includes Squirrel Search Algorithm (SSA) for fine-tuning the parameters of DNN. The performance evaluation of the presented OHFE-ODNN technique is conducted using chest x-rays dataset. The presented OHFE-ODNN method classified the binary classes effectively with a maximum precision of 95.82&#x0025;, accuracy of 94.01&#x0025; and F-score of 96.61&#x0025;. Besides, multiple classes were classified proficiently by OHFE-ODNN model with a precision of 95.63&#x0025;, accuracy of 95.60&#x0025; and an F-score of 95.73&#x0025;.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>COVID-19</kwd>
<kwd>classification</kwd>
<kwd>deep learning</kwd>
<kwd>radiological images</kwd>
</kwd-group>
</article-meta>
</front>
<body>
<sec id="s1"><label>1</label><title>Introduction</title>
<p>Respiratory infections in human beings tend to limit their survival rate and are highly fatal in nature. In 2019, SARS-CoV-2 was first diagnosed and named as COVID-19 by WHO. It falls under a group of viruses named coronavirus. Being a respiratory virus, COVID-19 causes severe cold, cough and fever along with respiratory syndromes like Middle East Respiratory Syndrome (MERS) and Severe Acute Respiratory Syndrome (SARS) [<xref ref-type="bibr" rid="ref-1">1</xref>]. These viruses are named coronaviruses because of the crown-shaped tips at their base. Novel coronavirus is generally found in animals; while it has the capability to spread to human beings and can quickly become a pandemic, affecting entire human population. COVID-19, as the name says, the outbreak of this deadly virus attacked the human population in the year 2019 and gained pandemic status by March 2020. It is an airborne disease i.e., its major source of spreading is through air which was confirmed on 28<sup>th</sup> January 2020. On 15<sup>th</sup> February 2020, in excess of 5,000 COVID-19 positive cases were confirmed and registered a total of 106 deaths.</p>
<p>Since 23<sup>rd</sup> January 2020, entire Wuhan city, China was quarantined by terminating all the resources such as shops, transportation modes and other sources in and out of the city. Further, quarantine was also extended to other neighboring countries. Followed by, several other countries have also quarantined the corona-affected patients. Italy has become the epicenter in European region since by March 2020, it recorded the highest death in the world. By 05<sup>th</sup> of April 2020, 15,000 deaths were registered in Italy, while it was 8,000 in Lombardia, 21,000 in Emilia-Romagna, and 1,200 in Piedmont.</p>
<p>In medical perspective, COVID-19 disease causes massive and highly mortal pneumonia with clinical depiction being SARS-CoV. Obviously, the patients exhibit flu-based signs like fever, dry cough, sore throat, tiredness and shortage in breathing. The pneumonia patients were further weakened by this disease and resulted in acute renal failure, and finally death. In addition to the above-mentioned symptoms, COVID-19 shows other symptoms such as lack of taste and smell as well. Hence, such asymptomatic patients acted as carriers due to which their immediate contacts got infected with COVID-19. It can be found that, among other inhabitants of the country who were sampled for pharyngeal swab, 50&#x0025;&#x2013;75&#x0025; of the individuals were swab positive. This inferred that they were affected with the virus without any symptoms [<xref ref-type="bibr" rid="ref-2">2</xref>].</p>
<p>In recent times, COVID-19 can be diagnosed through nose swab collected from the patient and by making the sample undergo Polymerase Chain Reaction (PCR). The major issue is that the swab can be taken and diagnosed for the infected people. But, asymptomatic patients could not be identified easily unless they exhibit some symptoms. If the diagnosis confirmed that a patient has COVID-19 with the help of PCR, the affected patients with pneumonia can be confirmed through chest X-ray. Then the Computed Tomography (CT) images in this model are assumed as the features for human eye, as illustrated by the developers in [<xref ref-type="bibr" rid="ref-3">3</xref>]. The spreading rate of COVID-19 is decided based on the ability of finding the diseased patients with minimum false negatives. By ensuring better disease management, it is clear that the periodical disease prediction activates the execution of monitoring care, which is highly essential for COVID-19 patients. The patients have been proven to show anomalies in their chest CT images with maximum bilateral contribution. Biomedical imaging implies the symptoms of pneumonia. WHO has announced various supplemental diagnostic protocols for COVID-19.</p>
<p>Diagnosis is generally carried out by processing Real-time Reverse PCR (rRT-PCR) upon biological samples collected from the patients. These samples may be sputum, blood and so on. Generally, it is accessible with a limited period of time. COVID-19 is probably predicted with the application of radiological imaging whereas its detection observed an increase from clinical photographs, where X-Rays are applied. Various works have described the prediction process of pulmonary disease by diagnosing the clinical images using Artificial Intelligence (AI). AI is a newly developed technology in recent times and its application is highly helpful for professionals in the interpretation of clinical images. Specifically, transfer learning and Deep Learning (DL) methodologies have been established and reused many times as initial point for consecutive operation. Deep Learning is one of the well-known methods in which pre-trained approaches are employed as primary points on computer vision as well as natural language computations. It offers a wide range of procedural resources that are essential for the development of Neural Network (NN) approaches, to resolve the issues and from huge jumps that offer relevant issues. The current efforts have implied drastic enhancement in the prediction of clinical sector, for example, lung cancer prediction, prostate cancer ranking etc.</p>
<p>The current research article presents a novel automated COVID-19 analysis method utilizing Optimal Hybrid Feature Extraction (OHFE) and Optimal Deep Neural Network, abbreviated as OHFE-ODNN in chest x-ray image. The proposed OHFE-ODNN method contains a sequence of procedures such as Median Filtering (MF)-based preprocessing, OHFE-based feature extraction and finally ODNN-based classifier. Here, OHFE is a combination of optimal GLCM and HOG features, where the optimal set of features are chosen by Oppositional Crow Search (OCS) algorithm. The ODNN model includes Squirrel Search Algorithm (SSA) for fine-tuning the DNN parameters. The performance of the OHFE-ODNN model was assessed utilizing chest x-rays dataset. The experimentation outcome proved the effective efficiency of OHFE-ODNN method compared to existing methods.</p>
<p>Rest of the paper is ordered as follows. Section 2 offers a detailed survey of existing techniques. Section 3 introduces the proposed OHFE-ODNN technique and Section 4 validates the performance of the proposed method. At last, Section 5 concludes the work.</p>
</sec>
<sec id="s2"><label>2</label><title>Literature Review</title>
<p>The recent advancements made in medical image processing methodologies triggered a rapid development in the establishments of smart detection as well as diagnosis materials. ML models are highly approved these days, as eminent modes for disease analysis. Thus, effective feature extraction models are essential for accomplishing optimal Machine Learning (ML) techniques. However, DL approaches have been extensively used in medical imaging models, since the features are extracted in an automated manner or with the help of a few pre-defined models like ResNet. Yu et al. [<xref ref-type="bibr" rid="ref-4">4</xref>] employed CNN for classifying the chest CT images of COVID-19 positive patients. Nardelli et al. [<xref ref-type="bibr" rid="ref-5">5</xref>] utilized 3D CNN for categorizing the pulmonary artery&#x2013;vein sections in chest CT scan image. Shin et al. [<xref ref-type="bibr" rid="ref-6">6</xref>] applied DCNN for classifying interstitial lung disease from CT scan image.</p>
<p>Xie et al. [<xref ref-type="bibr" rid="ref-7">7</xref>] divided the benign and malignant lung nodules under the application of knowledge-based collaborative DL on chest CT. This method attained the maximum accuracy in terms of classifying the lung nodes. Hagerty et al. [<xref ref-type="bibr" rid="ref-8">8</xref>] segregated melanoma dermoscopy image under the application of DL which achieved standard accuracy. Gerard et al. [<xref ref-type="bibr" rid="ref-9">9</xref>] predicted the pulmonary fissure from CT scan image with use of supervised discriminative learning approach. Setio et al. [<xref ref-type="bibr" rid="ref-10">10</xref>] employed a multi-view traditional system for the prediction of pulmonary nodule from CT image. Xia et al. [<xref ref-type="bibr" rid="ref-11">11</xref>] applied deep adversarial systems for the segmentation of abdominal CT image. Pezeshk et al. [<xref ref-type="bibr" rid="ref-12">12</xref>] made use of 3D CNN method to predict the pulmonary nodule from chest CT scan image. Zreik et al. [<xref ref-type="bibr" rid="ref-13">13</xref>] leveraged a classifier technique with the help of recurrent CNN for classifying Coronary Artery Plaque and Stenosis from coronary CT scan images. The study employed full 3D CNN in order to combine multi-dimensional data to tumor segments in CT. Bhandary et al. [<xref ref-type="bibr" rid="ref-14">14</xref>] deployed a methodology for the detection of lung infection utilizing DL technology. Gao et al. [<xref ref-type="bibr" rid="ref-15">15</xref>] utilized 3D block-based residual DL system for predicting tuberculosis disease levels in CT pulmonary image. Pannu et al. [<xref ref-type="bibr" rid="ref-16">16</xref>] developed PSO-relied ANFIS for the enhancement of classification rate. Zeng et al. [<xref ref-type="bibr" rid="ref-17">17</xref>] executed Gated bi-directional CNN (GCNN). GCNN was applied from the classification of patients whether affected with COVID-19 or not.</p>
<p>Dorgham et al. [<xref ref-type="bibr" rid="ref-18">18</xref>] intended to improve the security of communication and storage of medical images in cloud with the help of hybrid encryption techniques. In this study, symmetric and asymmetric encryption algorithms were incorporated. Due to this, a fast and secure encryption process was executed. Besides, three diverse techniques were selected in this study to build the model and security was increased utilizing digital signature approach. In literature [<xref ref-type="bibr" rid="ref-19">19</xref>], a secure image fusion approach was presented to preserve the privacy and protect copyright. In this study, two cloud services were utilized to eliminate the need for Trusted Third Party (TTP). Gudivada et al. [<xref ref-type="bibr" rid="ref-20">20</xref>] developed an efficient model to develop, maintain and utilize the models that can improve the healthcare sector. The goal of the study is to offer resources that can be utilized in the development of resembling models and deploying it in healthcare sector. As per the literature [<xref ref-type="bibr" rid="ref-21">21</xref>], Denotational Mathematics can act as an effective technique to model and control the DL network. Thus, it enhances the quality of healthcare decision making. Ghoneim et al. [<xref ref-type="bibr" rid="ref-22">22</xref>] presented an effective medical image forgery detection system for medical field to ensure that the images relevant to medical field remains unchanged. Gol&#x00E9;a et al. [<xref ref-type="bibr" rid="ref-23">23</xref>] presented a ROI based fragile watermarking method to detect the tampering of medical images. It is based on the network transmitted, where the sent message is split as packets whereas the redundant data is appended for treating the errors.</p>
<p>To summarize, it is identified that the DL approach accomplishes better outcomes to COVID-19 disease classification in chest CT scan image. DL methods might attain optimal results; hence the results could be maximized in future with the help of effective feature extraction models like participants of ResNet. In addition, hyper-tuning of DL methods could be accomplished with the help of transfer learning too. Thus, the establishment of new Deep Transfer Learning (DTL) based COVID-19 patient classification method forms a significant inspiration for current study.</p>
</sec>
<sec id="s3"><label>3</label><title>The Proposed Method</title>
<p><?A3B2 "fig1",5,"anchor"?><xref ref-type="fig" rid="fig-1">Fig. 1</xref> illustrates the working principle of proposed OHFE-ODNN technique. As shown, the input images are pre-processed using MF manner. Then, the hybrid set of OGLCM and HOG features are extracted. Finally, ODNN is applied with SSA to classify the feature set into different classes in the applied X-ray chest image.</p>
<sec id="s3_1"><label>3.1</label><title>Preprocessing</title>
<p>MF technique is defined as non-linear signal process model that depends upon recent statistics. MF result is defined as <inline-formula id="ieqn-1"><mml:math id="mml-ieqn-1"><mml:mi>g</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mi>d</mml:mi><mml:mo fence="false" stretchy="false">{</mml:mo><mml:mrow><mml:mi>f</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi>W</mml:mi></mml:mrow><mml:mo fence="false" stretchy="false">}</mml:mo></mml:math></inline-formula>, in which <inline-formula id="ieqn-2"><mml:math id="mml-ieqn-2"><mml:mi>f</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>g</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> denote the actual and final images correspondingly and <italic>W</italic> defines the 2D mask: with the size of <inline-formula id="ieqn-3"><mml:math id="mml-ieqn-3"><mml:mi>n</mml:mi><mml:mo>&#x00D7;</mml:mo><mml:mi>n</mml:mi></mml:math></inline-formula> such that <inline-formula id="ieqn-4"><mml:math id="mml-ieqn-4"><mml:mn>3</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mn>5</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mn>5</mml:mn></mml:math></inline-formula>, etc. 
Since MF is a non-linear filter, numerical analysis is highly difficult for an image with arbitrary noise. When the image is assumed to have zero mean and the noise follows a normal distribution, the noise variance of MF is defined as follows.
<disp-formula id="eqn-1"><label>(1)</label><mml:math id="mml-eqn-1" display="block"><mml:msubsup><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mn>2</mml:mn></mml:msubsup><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mn>4</mml:mn><mml:mi>n</mml:mi><mml:mrow><mml:msup><mml:mi>f</mml:mi><mml:mn>2</mml:mn></mml:msup></mml:mrow><mml:mover><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo accent="false">&#x00AF;</mml:mo></mml:mover></mml:mrow></mml:mfrac><mml:mo>&#x2248;</mml:mo><mml:mfrac><mml:mrow><mml:msubsup><mml:mi>&#x03C3;</mml:mi><mml:mi>i</mml:mi><mml:mn>2</mml:mn></mml:msubsup></mml:mrow><mml:mrow><mml:mi>n</mml:mi><mml:mo>+</mml:mo><mml:mfrac><mml:mi>&#x03C0;</mml:mi><mml:mn>2</mml:mn></mml:mfrac><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:mfrac><mml:mo>&#x22C5;</mml:mo><mml:mfrac><mml:mi>&#x03C0;</mml:mi><mml:mn>2</mml:mn></mml:mfrac></mml:math></disp-formula>where <inline-formula id="ieqn-5"><mml:math id="mml-ieqn-5"><mml:msubsup><mml:mi>&#x03C3;</mml:mi><mml:mi>i</mml:mi><mml:mn>2</mml:mn></mml:msubsup></mml:math></inline-formula> defines the input noise power, <italic>n</italic> denotes the size of MF and <inline-formula id="ieqn-6"><mml:math id="mml-ieqn-6"><mml:mi>f</mml:mi><mml:mover><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo accent="false">&#x00AF;</mml:mo></mml:mover></mml:math></inline-formula> means the performance of noise intensity. Followed by, the noise variance of average filter was denoted as follows.
<disp-formula id="eqn-2"><label>(2)</label><mml:math id="mml-eqn-2" display="block"><mml:msubsup><mml:mi>&#x03C3;</mml:mi><mml:mn>0</mml:mn><mml:mn>2</mml:mn></mml:msubsup><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>n</mml:mi></mml:mfrac><mml:msubsup><mml:mi>&#x03C3;</mml:mi><mml:mi>i</mml:mi><mml:mn>2</mml:mn></mml:msubsup></mml:math></disp-formula>
</p>
<fig id="fig-1"><label>Figure 1</label><caption><title>Overall process of the proposed model</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_24312-fig-1.png"/></fig>
<p>When <xref ref-type="disp-formula" rid="eqn-1">(1)</xref> and <xref ref-type="disp-formula" rid="eqn-2">(2)</xref> are compared, it can be inferred that the MF functions are based on two objectives namely the size of mask and noise distribution. MF eliminates the noise considerably, when compared to average filtering. The performance of MF is maximized when the MF method is integrated with the average filtering model.</p>
</sec>
<sec id="s3_2"><label>3.2</label><title>Optimal Hybrid Feature Extraction</title>
<p>OHFE model performs feature extraction process upon the preprocessed image, where OGLCM and HOG features are integrated together.</p>
<sec id="s3_2_1"><label>3.2.1</label><title>HOG Features</title>
<p>A major element in HOG feature was applicable for containing the local procedure of object. The indifference of object conversions and brightness state are to be considered as edge and data-based gradients which are estimated under the application of various coordinate-HOG feature vector. A normal expression, applied in processing gradient point, is depicted in <xref ref-type="disp-formula" rid="eqn-3">Eq. (3)</xref>:
<disp-formula id="eqn-3"><label>(3)</label><mml:math id="mml-eqn-3" display="block"><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mi>x</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mi>N</mml:mi><mml:mo>&#x2217;</mml:mo><mml:mi>I</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mspace width="thickmathspace" /><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mspace width="thickmathspace" /><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:msup><mml:mi>N</mml:mi><mml:mi>T</mml:mi></mml:msup></mml:mrow><mml:mo>&#x2217;</mml:mo><mml:mi>I</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>
</p>
<p>Image prediction windows are characterized as different spatial areas and are termed as &#x2018;cells&#x2019;. At last, the magnitude of gradients (x, y) is demonstrated in <xref ref-type="disp-formula" rid="eqn-4">Eq. (4)</xref>.
<disp-formula id="eqn-4"><label>(4)</label><mml:math id="mml-eqn-4" display="block"><mml:mi>G</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:msqrt><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mi>x</mml:mi></mml:msub><mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:msup><mml:mo>+</mml:mo><mml:msub><mml:mi>G</mml:mi><mml:mi>y</mml:mi></mml:msub><mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:msqrt></mml:math></disp-formula>
</p>
<p>The edge orientation of the point (x, y) was illustrated in <xref ref-type="disp-formula" rid="eqn-5">Eq. (5)</xref>:
<disp-formula id="eqn-5"><label>(5)</label><mml:math id="mml-eqn-5" display="block"><mml:mi>&#x03B8;</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mrow><mml:msup><mml:mi>tan</mml:mi><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mfrac><mml:mrow><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mi>x</mml:mi></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mfrac></mml:math></disp-formula>
</p>
<p>Here, Gx and Gy imply the horizontal and vertical directions of the gradients. To improve robustness to variations in brightness and noise, a normalization operation is computed next to the determination of histogram values. In contrast, the computation of normalization can be employed and the local histograms can be validated. In comparison with normalization, L2-norm predicts the existence of cancer effectively. The blocks of normalized HOG are showcased in <xref ref-type="disp-formula" rid="eqn-6">Eq. (6)</xref>.
<disp-formula id="eqn-6"><label>(6)</label><mml:math id="mml-eqn-6" display="block"><mml:mrow><mml:mi mathvariant="normal">L</mml:mi></mml:mrow><mml:mn>2</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mi mathvariant="normal">n</mml:mi><mml:mi mathvariant="normal">o</mml:mi><mml:mi mathvariant="normal">r</mml:mi><mml:mi mathvariant="normal">m</mml:mi></mml:mrow><mml:mo>:</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mi mathvariant="normal">f</mml:mi></mml:mrow><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">h</mml:mi></mml:mrow><mml:mrow><mml:msqrt><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mrow><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mi>h</mml:mi><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow></mml:mrow><mml:msubsup><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mn>2</mml:mn><mml:mn>2</mml:mn></mml:msubsup></mml:msqrt><mml:mo>+</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mi mathvariant="normal">e</mml:mi></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:mrow></mml:mfrac></mml:math></disp-formula>where <italic>e</italic> depicts a small positive score applied in regularization, <italic>f</italic> represents the feature vector, <italic>h</italic> indicates the non-normalized vector, and <inline-formula id="ieqn-7"><mml:math id="mml-ieqn-7"><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mrow><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mi>h</mml:mi><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow></mml:mrow><mml:msubsup><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mn>2</mml:mn><mml:mn>2</mml:mn></mml:msubsup></mml:math></inline-formula> means 2-norm of HOG normalization.</p>
</sec>
<sec id="s3_2_2"><label>3.2.2</label><title>GLCM Features</title>
<p>In general, &#x2018;texture&#x2019; is defined as the duplicated pattern of local difference present in image intensity. The application of co-occurrence matrix depends upon the identity of grey-level deployment that is applied in texture detection [<xref ref-type="bibr" rid="ref-24">24</xref>]. It is also modified using dense and fine textures, when compared with incomplete textures.</p>
<p>Based on the measures of intensities for all integrations, statistics is categorized as 1<sup>st</sup> order, 2<sup>nd</sup> order and higher-order statistics. In this approach, <inline-formula id="ieqn-8"><mml:math id="mml-ieqn-8"><mml:mi>&#x03BC;</mml:mi></mml:math></inline-formula> denotes the mean value of P. <inline-formula id="ieqn-9"><mml:math id="mml-ieqn-9"><mml:mrow><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mi>&#x03C7;</mml:mi></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:msub><mml:mi>&#x03C3;</mml:mi><mml:mi>&#x03C7;</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-10"><mml:math id="mml-ieqn-10"><mml:mrow><mml:msub><mml:mi>&#x03C3;</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> denote the means and SD values of <inline-formula id="ieqn-11"><mml:math id="mml-ieqn-11"><mml:mrow><mml:msub><mml:mi>P</mml:mi><mml:mi>&#x03C7;</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-12"><mml:math id="mml-ieqn-12"><mml:mrow><mml:msub><mml:mi>P</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow><mml:mo>.</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>G</mml:mi></mml:math></inline-formula> denotes the size of GLCM.</p>
</sec>
<sec id="s3_2_3"><label>3.2.3</label><title>Optimal Feature Selection Using OCS Algorithm</title>
<p>Under the application of texture features such as GLCM and GLRLM, the optimal subset of features is obtained from pre-processing the clinical image. The actual features extracted are not provided for classification, as it consumes the maximum processing time for implementation. Thus, the optimal FS method needs to be selected in which the important features are decided with the help of optimization algorithm named OCS. The developer in literature [<xref ref-type="bibr" rid="ref-25">25</xref>] has introduced a CS method based on crow&#x0027;s behavior in terms of concealing and consuming the food. With respect to crow&#x0027;s hierarchy, the characteristics of CSA are detailed herewith.
<list list-type="simple">
<list-item><label>&#x25A0;</label><p>It is a form of flock</p></list-item>
<list-item><label>&#x25A0;</label><p>It conserves the place, where it hides the food</p></list-item>
<list-item><label>&#x25A0;</label><p>In order to steal, they always fly in rows by following one another</p></list-item>
<list-item><label>&#x25A0;</label><p>With a certain probability, it protects its caches and prevents them from being pilfered.</p></list-item>
</list></p>
<p>The actual as well as novel places of 2 crows are shown in <?A3B2 "fig2",5,"anchor"?><xref ref-type="fig" rid="fig-2">Fig. 2</xref>.</p>
<fig id="fig-2"><label>Figure 2</label><caption><title>Inspiration of CSA. (a) If (f1 &#x003C; 1) (b) if (f1 &#x003E; 1)</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_24312-fig-2.png"/></fig>
<p>In order to improve the classical CS method, contrast task is applied. For all the invoked solutions, the neighboring direction begins to operate. By comparing the solutions, optimal solutions can be accomplished.</p>
<p>The population of crows should be declared, i.e., the features obtained from the clinical image in terms of <inline-formula id="ieqn-13"><mml:math id="mml-ieqn-13"><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula>, while the initialized crows are placed arbitrarily within the search space.
<disp-formula id="eqn-7"><label>(7)</label><mml:math id="mml-eqn-7" display="block"><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mi>n</mml:mi></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>w</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mn>3</mml:mn><mml:mo>&#x2026;</mml:mo><mml:mi>n</mml:mi></mml:math></disp-formula>
</p>
<p>When the solutions are compared, the optimal one can be chosen as the primary solution. For instance, suppose <inline-formula id="ieqn-14"><mml:math id="mml-ieqn-14"><mml:mi>f</mml:mi><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo>&#x2208;</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>g</mml:mi><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>h</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> defines a real value. Under the application of opposite point definition, it can be determined as:
<disp-formula id="eqn-8"><label>(8)</label><mml:math id="mml-eqn-8" display="block"><mml:mrow><mml:mover><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow><mml:mo>&#x007E;</mml:mo></mml:mover></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:msub><mml:mi>g</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow><mml:mo>+</mml:mo><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:math></disp-formula>
</p>
<p>The Fitness Function (FF) of OCS method is defined according to the main purpose of this study. In this approach, optimization is carried out for accomplishing the optimal features from the applied dataset images.
<disp-formula id="eqn-9"><label>(9)</label><mml:math id="mml-eqn-9" display="block"><mml:mi>O</mml:mi><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mi>M</mml:mi><mml:mi>A</mml:mi><mml:mi>X</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>A</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:mi>u</mml:mi><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>
</p>
<p>When generating a new position, a crow is randomly considered so that it can be developed as a new position under selection of the flock of crows, where the crow &#x2018;<inline-formula id="ieqn-15"><mml:math id="mml-ieqn-15"><mml:mi>j</mml:mi></mml:math></inline-formula>&#x2019; owns a unique location and storage space. The remarkable place of crow <inline-formula id="ieqn-16"><mml:math id="mml-ieqn-16"><mml:mrow><mml:msup><mml:mi>P</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:math></inline-formula> has achieved using the provided in <xref ref-type="disp-formula" rid="eqn-10">Eq. (10)</xref>.
<disp-formula id="eqn-10"><label>(10)</label><mml:math id="mml-eqn-10" display="block"><mml:mrow><mml:msup><mml:mi>P</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mrow><mml:msup><mml:mi>P</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow><mml:mo>+</mml:mo><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mi>f</mml:mi><mml:mrow><mml:msup><mml:mi>l</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msup><mml:mi>m</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msup><mml:mi>P</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mrow><mml:mspace width="thickmathspace" 
/></mml:mrow><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow><mml:mo>&#x2265;</mml:mo><mml:mi>A</mml:mi><mml:mrow><mml:msup><mml:mi>P</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>P</mml:mi></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mi>o</mml:mi><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>w</mml:mi><mml:mi>i</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo fence="true" stretchy="true" symmetric="true"></mml:mo></mml:mrow></mml:math></disp-formula>
</p>
<p>The maximization of <xref ref-type="disp-formula" rid="eqn-10">Eq. (10)</xref> is defined as follows: <inline-formula id="ieqn-17"><mml:math id="mml-ieqn-17"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-18"><mml:math id="mml-ieqn-18"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> demonstrate the arbitrary values of crows, <italic>i</italic> and <italic>j</italic> correspondingly from zero and one, <inline-formula id="ieqn-19"><mml:math id="mml-ieqn-19"><mml:mi>f</mml:mi><mml:mrow><mml:msup><mml:mi>l</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:math></inline-formula> depicts the flight length of crow <italic>i</italic>, <italic>P</italic> signifies the location of the crow, <inline-formula id="ieqn-20"><mml:math id="mml-ieqn-20"><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msup><mml:mi>m</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:math></inline-formula> represents the storage position of <inline-formula id="ieqn-21"><mml:math id="mml-ieqn-21"><mml:mi>j</mml:mi></mml:math></inline-formula>th crow and <inline-formula id="ieqn-22"><mml:math id="mml-ieqn-22"><mml:mi>A</mml:mi><mml:mrow><mml:msup><mml:mi>P</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:math></inline-formula> resembles the crucial probabilities of crow <italic>j</italic> at iteration.</p>
<p>The position and storage measures of the recently extended crow are calculated based on <xref ref-type="disp-formula" rid="eqn-11">Eq. (11)</xref>.
<disp-formula id="eqn-11"><label>(11)</label><mml:math id="mml-eqn-11" display="block"><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msup><mml:mi>m</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mrow><mml:msup><mml:mi>P</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>f</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mrow><mml:msup><mml:mi>P</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x003E;</mml:mo><mml:mi>f</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msup><mml:mi>m</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:mrow><mml:mo 
stretchy="false">)</mml:mo></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msup><mml:mi>m</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mi>o</mml:mi><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>w</mml:mi><mml:mi>i</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo fence="true" stretchy="true" symmetric="true"></mml:mo></mml:mrow></mml:math></disp-formula>
</p>
<p>It is clear that the fitness score of a crow&#x0027;s novel position should be superior to that of its previous place. Usually, a crow tries to maximize its storage space by selecting a new position. When the iterations reach the maximum value, the optimum storage place, corresponding to the objective, yields a better result for the extracted features. The well-known patterns of the OCS method are depicted in <?A3B2 "fig3",5,"anchor"?><xref ref-type="fig" rid="fig-3">Fig. 3</xref>.</p>
<fig id="fig-3"><label>Figure 3</label><caption><title>Opposition based crow search algorithm</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_24312-fig-3.png"/></fig>
</sec>
</sec>
<sec id="s3_3"><label>3.3</label><title>ODNN Based Classification</title>
<p>DNN model is comprised of 3 major elements like input, resultant, and hidden layers. During training stage, DNN maximizes the weight of nodes in hidden states using SSA. The NN frequently fits the labeled training information&#x0027;s solution boundary due to the progressive growth in training iterations. In order to enhance the speed of training process, DNN, classification accuracy, and 2 hidden states are developed. During the hidden state, overall nodes are determined by applying <xref ref-type="disp-formula" rid="eqn-12">Eq. (12)</xref>.
<disp-formula id="eqn-12"><label>(12)</label><mml:math id="mml-eqn-12" display="block"><mml:mi>n</mml:mi><mml:mo>=</mml:mo><mml:msqrt><mml:mi>a</mml:mi><mml:mo>+</mml:mo><mml:mi>b</mml:mi></mml:msqrt><mml:mo>+</mml:mo><mml:mi>c</mml:mi></mml:math></disp-formula>where the count of input layer nodes is represented as <italic>a</italic>, the count of resultant layer nodes is depicted as <italic>b</italic>, the count of hidden state nodes is denoted as <italic>n</italic>, and a constant value between 1 and 10 is represented as <italic>c</italic>. The conv1 layer gets an input of <sans-serif>112&#x2009;&#x00D7;&#x2009;112</sans-serif> with 7&#x2009;&#x00D7;&#x2009;7, stride 2.</p>
<p>In order to activate the ability of non-linear fitness, an activation function was comprised in the hidden state of DNN. Here, it applies the sigmoid as activation function which is defined as follows
<disp-formula id="eqn-13"><label>(13)</label><mml:math id="mml-eqn-13" display="block"><mml:mi>S</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mn>1</mml:mn><mml:mo>+</mml:mo><mml:mrow><mml:msup><mml:mi>e</mml:mi><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>x</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:mrow></mml:mfrac><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow></mml:math></disp-formula>
</p>
<p>The input data of a system is named as <italic>x</italic> which is enabled using a mapping function, <inline-formula id="ieqn-23"><mml:math id="mml-ieqn-23"><mml:mrow><mml:msub><mml:mi>M</mml:mi><mml:mi>f</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula>.
<disp-formula id="eqn-14"><label>(14)</label><mml:math id="mml-eqn-14" display="block"><mml:mrow><mml:msub><mml:mi>M</mml:mi><mml:mi>f</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>g</mml:mi><mml:mi>m</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mi>x</mml:mi><mml:mo>+</mml:mo><mml:mrow><mml:msub><mml:mi>&#x03B2;</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>where, <inline-formula id="ieqn-24"><mml:math id="mml-ieqn-24"><mml:mi>&#x03C9;</mml:mi></mml:math></inline-formula> and <inline-formula id="ieqn-25"><mml:math id="mml-ieqn-25"><mml:mi>&#x03B2;</mml:mi></mml:math></inline-formula> depict the weight matrix and bias amongst a resultant as well as hidden layers correspondingly. The space of hidden neurons can be aligned manually, and the effective model is named as supervised loss function for DNN. Here, the main element that needs to be applied is the data with sample labels that mimics the human methodologies. Further, it is devised with labeled data sample <inline-formula id="ieqn-26"><mml:math id="mml-ieqn-26"><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>l</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> to hidden layer. The loss structure is determined as follows.
<disp-formula id="eqn-15"><label>(15)</label><mml:math id="mml-eqn-15" display="block"><mml:mi>S</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mi>s</mml:mi></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:msub><mml:mi>b</mml:mi><mml:mi>s</mml:mi></mml:msub></mml:mrow><mml:mo>;</mml:mo><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>l</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mn>2</mml:mn><mml:mi>m</mml:mi></mml:mrow></mml:mfrac><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>m</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mo fence="false" stretchy="false">&#x2016;</mml:mo><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mi>s</mml:mi></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:msub><mml:mi>b</mml:mi><mml:mi>s</mml:mi></mml:msub></mml:mrow><mml:mo>;</mml:mo><mml:mi>x</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>l</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow><mml:msubsup><mml:mo fence="false" stretchy="false">&#x2016;</mml:mo><mml:mn>2</mml:mn><mml:mn>2</mml:mn></mml:msubsup><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow></mml:math></disp-formula>where <inline-formula id="ieqn-27"><mml:math id="mml-ieqn-27"><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mi>s</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-28"><mml:math id="mml-ieqn-28"><mml:mrow><mml:msub><mml:mi>b</mml:mi><mml:mi>s</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> define the subsets of biases, while &#x2018;m&#x2019; depicts the count of neurons present in the hidden layer.</p>
<p>Cross Entropy (CE) was applied as a loss function of DNN which is considered to be the configuration of training and testing. The application of CE does not apply the function of sigmoid as well as softmax output frameworks. The loss of CE is determined using <xref ref-type="disp-formula" rid="eqn-16">Eq. (16)</xref>.
<disp-formula id="eqn-16"><label>(16)</label><mml:math id="mml-eqn-16" display="block"><mml:mrow><mml:msub><mml:mi>C</mml:mi><mml:mi>E</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>n</mml:mi></mml:mfrac><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>k</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>n</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mo stretchy="false">[</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>Y</mml:mi><mml:mi>k</mml:mi></mml:msub></mml:mrow><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:mi>g</mml:mi><mml:mrow><mml:msub><mml:mrow><mml:mrow><mml:mover><mml:mi>Y</mml:mi><mml:mo stretchy="false">&#x005E;</mml:mo></mml:mover></mml:mrow></mml:mrow><mml:mi>k</mml:mi></mml:msub></mml:mrow><mml:mo>+</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>Y</mml:mi><mml:mi>k</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mi mathvariant="normal">l</mml:mi><mml:mi mathvariant="normal">o</mml:mi><mml:mi mathvariant="normal">g</mml:mi></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mrow><mml:mover><mml:mi>Y</mml:mi><mml:mo stretchy="false">&#x005E;</mml:mo></mml:mover></mml:mrow></mml:mrow><mml:mi>k</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo stretchy="false">]</mml:mo></mml:math></disp-formula>where, <italic>n</italic> implies the volume of training sample, <inline-formula id="ieqn-100"><mml:math id="mml-ieqn-100"><mml:mrow><mml:msub><mml:mi>Y</mml:mi><mml:mi>k</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> refers to k<sup>th</sup> original result of training set, <inline-formula id="ieqn-29"><mml:math id="mml-ieqn-29"><mml:mrow><mml:msub><mml:mrow><mml:mover><mml:mi>Y</mml:mi><mml:mo 
stretchy="false">&#x005E;</mml:mo></mml:mover></mml:mrow><mml:mi>k</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> indicates the k<sup>th</sup> defined result of testing set. It is employed with SSA technology for the selection of optimal weight of DNN system.</p>
<p>When enhancing the fitness of the population, the solution value becomes highly significant with SSA. When the values are upgraded, this model shows the fitness value to be shifted towards the optimal result. Besides, the novel and existing results are compared with each other. Subsequently, the upcoming iteration achieves better results. Furthermore, it needs the specification of the population size and count of iterations that result in the execution of the method. It becomes a leading one, when compared with other optimization models, in terms of minimum processing complexity, time as well as rapid convergence speed. The execution of this method is consolidated in the upcoming sections.</p>
<p>First, initiate the population size, count of iterations as well as the termination condition. Based on the population, optimal and poor solutions are computed by means of objective function. The present solution depends upon the optimal and inferior solutions which have to be modified by applying <xref ref-type="disp-formula" rid="eqn-17">Eq. (17)</xref>.
<disp-formula id="eqn-17"><label>(17)</label><mml:math id="mml-eqn-17" display="block"><mml:msubsup><mml:mi>x</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow><mml:mrow><mml:msup><mml:mi></mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x2032;</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi><mml:mo>&#x00D7;</mml:mo><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mspace width="thickmathspace" /><mml:mo>&#x2212;</mml:mo><mml:mspace width="thickmathspace" /><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mspace width="thickmathspace" /><mml:mo>&#x2212;</mml:mo><mml:mspace width="thickmathspace" /><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo 
stretchy="false">(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>w</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mspace width="thickmathspace" /><mml:mo>&#x2212;</mml:mo><mml:mspace width="thickmathspace" /><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:msub></mml:mrow></mml:math></disp-formula>where <inline-formula id="ieqn-30"><mml:math id="mml-ieqn-30"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-31"><mml:math id="mml-ieqn-31"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>w</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula> are meant to be the measures of <inline-formula id="ieqn-32"><mml:math id="mml-ieqn-32"><mml:mi>j</mml:mi></mml:math></inline-formula>th variable for optimal candidate and worst candidate, correspondingly; <inline-formula id="ieqn-33"><mml:math id="mml-ieqn-33"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-34"><mml:math 
id="mml-ieqn-34"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula> represent the arbitrary values between [0, 1]. The modified value is compared with the existing ones. In case, the previous one is maximum, then it replaces the old solution; otherwise, it maintains the same [<xref ref-type="bibr" rid="ref-26">26</xref>]. This is followed until the termination condition is reached.</p>
</sec>
<sec id="s3_4"><label>3.4</label><title>Optimal DNN Using SSA</title>
<p>The SSA method has been used herewith for parameter tuning in DNN as per the literature [<xref ref-type="bibr" rid="ref-27">27</xref>&#x2013;<xref ref-type="bibr" rid="ref-30">30</xref>]. The SSA approach is developed from the foraging behavior of flying squirrels. This is an efficient approach applied by such small animals to migrate far away. When the weather is warm, a squirrel changes its location by jumping between trees in the forest and finds its food. It often consumes acorn nuts from which it acquires the energy required for its functioning. Next, it explores for hickory nuts, which are better than acorn nuts, and it saves those nuts for the winter season. In case of cold weather, the squirrels become highly vulnerable and survive on energy-rich foods. Subsequently, when the weather becomes warm again, the squirrels become powerful and effective, and the previous strategies are followed in the warm season for the exploration of food. Based on the food foraging hierarchy of squirrels [<xref ref-type="bibr" rid="ref-31">31</xref>], the optimal SSA is developed iteratively in a mathematical manner.</p>
<sec id="s3_4_1"><label>3.4.1</label><title>Initialization Phase</title>
<p>There are some significant attributes in SSA namely, maximum value of iteration <inline-formula id="ieqn-35"><mml:math id="mml-ieqn-35"><mml:mi>I</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mrow><mml:mspace width="thickmathspace" /><mml:mi mathvariant="normal">m</mml:mi><mml:mi mathvariant="normal">a</mml:mi><mml:mi mathvariant="normal">x</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula>, population size <inline-formula id="ieqn-36"><mml:math id="mml-ieqn-36"><mml:mi>N</mml:mi><mml:mi>P</mml:mi></mml:math></inline-formula>, decision variable value <inline-formula id="ieqn-37"><mml:math id="mml-ieqn-37"><mml:mi>n</mml:mi></mml:math></inline-formula>, predator existence possibility <inline-formula id="ieqn-38"><mml:math id="mml-ieqn-38"><mml:mrow><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>d</mml:mi><mml:mi>p</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula>, scaling factor <inline-formula id="ieqn-39"><mml:math id="mml-ieqn-39"><mml:mi>s</mml:mi><mml:mi>f</mml:mi></mml:math></inline-formula>, gliding constant <inline-formula id="ieqn-40"><mml:math id="mml-ieqn-40"><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mi>c</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> and upper and lower bounds to decision variables, <inline-formula id="ieqn-41"><mml:math id="mml-ieqn-41"><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mi>U</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-42"><mml:math id="mml-ieqn-42"><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mi>L</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula>. The existing attributes are initiated from the starting stages of SSA.</p>
</sec>
<sec id="s3_4_2"><label>3.4.2</label><title>Location Initialization Phase</title>
<p>The location of squirrels is loaded randomly from the searching space as shown below:
<disp-formula id="eqn-18"><label>(18)</label><mml:math id="mml-eqn-18" display="block"><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mi>L</mml:mi></mml:msub></mml:mrow><mml:mo>+</mml:mo><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x2217;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mi>U</mml:mi></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mi>L</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>N</mml:mi><mml:mi>P</mml:mi><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>n</mml:mi></mml:math></disp-formula>where rand <inline-formula id="ieqn-43"><mml:math id="mml-ieqn-43"><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> implies to the uniformly distributed arbitrary scores within zero and one. 
The fitness measure <inline-formula id="ieqn-44"><mml:math id="mml-ieqn-44"><mml:mi>f</mml:mi><mml:mo>=</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo>&#x2026;</mml:mo><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mrow><mml:mi>N</mml:mi><mml:mi>P</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> of a distinct squirrel&#x0027;s location is computed by substituting the decision variables into the FF:
<disp-formula id="eqn-19"><label>(19)</label><mml:math id="mml-eqn-19" display="block"><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mn>2</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo>&#x2026;</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>N</mml:mi><mml:mi>P</mml:mi></mml:math></disp-formula>
</p>
<p>Then, the quality of the food sources is calculated by applying the fitness measure to the squirrels&#x2019; locations, as depicted herewith.
<disp-formula id="eqn-20"><label>(20)</label><mml:math id="mml-eqn-20" display="block"><mml:mo stretchy="false">[</mml:mo><mml:mrow><mml:mi>s</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>d</mml:mi><mml:mi mathvariant="normal">&#x005F;</mml:mi><mml:mi>f</mml:mi><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mi>s</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi mathvariant="normal">&#x005F;</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mi>e</mml:mi><mml:mi>x</mml:mi></mml:mrow><mml:mo stretchy="false">]</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo>=</mml:mo><mml:mi>s</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>t</mml:mi><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>f</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>
</p>
<p>Besides, the organization of food sources is processed. The forest is composed of three types of trees, namely oak trees (acorn nuts), hickory trees, and normal trees. The optimum food source (lowest fitness) was assumed to be the hickory nut tree <inline-formula id="ieqn-45"><mml:math id="mml-ieqn-45"><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>h</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula>, then the next-best food sources are referred to as acorn nut trees <inline-formula id="ieqn-46"><mml:math id="mml-ieqn-46"><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>a</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> while the rest are termed as normal trees <inline-formula id="ieqn-47"><mml:math id="mml-ieqn-47"><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula>:
<disp-formula id="eqn-21"><label>(21)</label><mml:math id="mml-eqn-21" display="block"><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>h</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mi>F</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>s</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>t</mml:mi><mml:mi mathvariant="normal">&#x005F;</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mi>e</mml:mi><mml:mi>x</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>
<disp-formula id="eqn-22"><label>(22)</label><mml:math id="mml-eqn-22" display="block"><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>a</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mrow><mml:mo>:</mml:mo></mml:mrow><mml:mn>3</mml:mn></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mi>F</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>s</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>t</mml:mi><mml:mi mathvariant="normal">&#x005F;</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mi>e</mml:mi><mml:mi>x</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>2</mml:mn><mml:mrow><mml:mo>:</mml:mo></mml:mrow><mml:mn>4</mml:mn></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>
<disp-formula id="eqn-23"><label>(23)</label><mml:math id="mml-eqn-23" display="block"><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mrow><mml:mo>:</mml:mo></mml:mrow><mml:mi>N</mml:mi><mml:mi>P</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>4</mml:mn></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mi>F</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>s</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>t</mml:mi><mml:mi mathvariant="normal">&#x005F;</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mi>e</mml:mi><mml:mi>x</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>5</mml:mn><mml:mrow><mml:mo>:</mml:mo></mml:mrow><mml:mi>N</mml:mi><mml:mi>P</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>
</p>
</sec>
<sec id="s3_4_3"><label>3.4.3</label><title>Location Creation Phase</title>
<p>The 3 states that represent the dynamic gliding strategy of squirrels are determined as follows.</p>
<p>Scenario 1. The squirrels residing in acorn nut trees jump to the hickory nut tree. Based on this scenario, a novel place is developed in the following way.
<disp-formula id="eqn-24"><label>(24)</label><mml:math id="mml-eqn-24" display="block"><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>a</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>n</mml:mi><mml:mi>e</mml:mi><mml:mi>w</mml:mi></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable columnalign="left" rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>a</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mi>I</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:mrow><mml:msub><mml:mi>d</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mi>c</mml:mi></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>h</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mi>l</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>a</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mi>I</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msubsup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mi>o</mml:mi><mml:mi>m</mml:mi><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>t</mml:mi><mml:mi>i</mml:mi><mml:mi>o</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo fence="true" stretchy="true" symmetric="true"></mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thickmathspace" 
/><mml:mi>R</mml:mi><mml:mo>&#x2265;</mml:mo><mml:mrow><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>d</mml:mi><mml:mi>p</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:mi>o</mml:mi><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>w</mml:mi><mml:mi>i</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>where <inline-formula id="ieqn-48"><mml:math id="mml-ieqn-48"><mml:mrow><mml:msub><mml:mi>d</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> denotes the random gliding distance, <inline-formula id="ieqn-49"><mml:math id="mml-ieqn-49"><mml:mrow><mml:msub><mml:mi>R</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow></mml:math></inline-formula> denotes a function that returns a uniformly distributed random value between 0 and 1, and <inline-formula id="ieqn-50"><mml:math id="mml-ieqn-50"><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mi>c</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> is the gliding constant.</p>
<p>Scenario 2. Squirrels that reside in normal trees go to acorn nut trees to gather the required food. A new position is deployed using the given function:
<disp-formula id="eqn-25"><label>(25)</label><mml:math id="mml-eqn-25" display="block"><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>n</mml:mi><mml:mi>e</mml:mi><mml:mi>w</mml:mi></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable columnalign="left" rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mi>I</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:mrow><mml:msub><mml:mi>d</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mi>c</mml:mi></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>a</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mi>l</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mi>I</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msubsup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mi>o</mml:mi><mml:mi>m</mml:mi><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>t</mml:mi><mml:mi>i</mml:mi><mml:mi>o</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo fence="true" stretchy="true" symmetric="true"></mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thickmathspace" 
/><mml:mi>R</mml:mi><mml:mo>&#x2265;</mml:mo><mml:mrow><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>d</mml:mi><mml:mi>p</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:mi>o</mml:mi><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>w</mml:mi><mml:mi>i</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>where <inline-formula id="ieqn-51"><mml:math id="mml-ieqn-51"><mml:mrow><mml:msub><mml:mi>R</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow></mml:math></inline-formula> is a function which offers the measure of uniform distribution within zero and one.</p>
<p>Scenario 3. Any squirrel on a normal tree moves to the hickory nut tree if it has already met its routine food objective. At this point, a novel location of the squirrel was established as provided below.
<disp-formula id="eqn-26"><label>(26)</label><mml:math id="mml-eqn-26" display="block"><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>n</mml:mi><mml:mi>e</mml:mi><mml:mi>w</mml:mi></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable columnalign="left" rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mi>l</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:mrow><mml:msub><mml:mi>d</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mi>c</mml:mi></mml:msub></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>h</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mi>l</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mi>l</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msubsup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mi>o</mml:mi><mml:mi>m</mml:mi><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>t</mml:mi><mml:mi>i</mml:mi><mml:mi>o</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo fence="true" stretchy="true" symmetric="true"></mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thickmathspace" 
/><mml:mi>R</mml:mi><mml:mo>&#x2265;</mml:mo><mml:mrow><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>d</mml:mi><mml:mi>p</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:mi>o</mml:mi><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>w</mml:mi><mml:mi>i</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>where <inline-formula id="ieqn-52"><mml:math id="mml-ieqn-52"><mml:mrow><mml:msub><mml:mi>R</mml:mi><mml:mn>3</mml:mn></mml:msub></mml:mrow></mml:math></inline-formula> indicates a function that returns a uniformly distributed random value between zero and one. Such values can be large, which invokes high perturbations in <xref ref-type="disp-formula" rid="eqn-24">(24)</xref>&#x2013;<xref ref-type="disp-formula" rid="eqn-26">(26)</xref>. For accomplishing an applicable model, a Scaling Factor (sf) with a value of 18 was applied as a divisor of <inline-formula id="ieqn-53"><mml:math id="mml-ieqn-53"><mml:mrow><mml:msub><mml:mi>d</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula>.</p>
</sec>
<sec id="s3_4_4"><label>3.4.4</label><title>Seasonal Monitoring Criteria Validation</title>
<p>The foraging nature of the flying squirrels depends upon the varying seasons. Hence, seasonal observation should be performed so that trapping in a local optimum outcome can be eliminated. The seasonal constant <inline-formula id="ieqn-54"><mml:math id="mml-ieqn-54"><mml:mi>S</mml:mi><mml:mi>c</mml:mi></mml:math></inline-formula> as well as its minimum value are managed at the primary stage itself as provided herewith.
<disp-formula id="eqn-27"><label>(27)</label><mml:math id="mml-eqn-27" display="block"><mml:msubsup><mml:mi>S</mml:mi><mml:mi>c</mml:mi><mml:mi>t</mml:mi></mml:msubsup><mml:mo>=</mml:mo><mml:msqrt><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>k</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>n</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>a</mml:mi><mml:mi>t</mml:mi><mml:mo>,</mml:mo><mml:mi>k</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>h</mml:mi><mml:mi>t</mml:mi><mml:mo>,</mml:mo><mml:mi>k</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:msqrt><mml:mo>,</mml:mo><mml:mi>t</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mn>3</mml:mn></mml:math></disp-formula>
<disp-formula id="eqn-28"><label>(28)</label><mml:math id="mml-eqn-28" display="block"><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>c</mml:mi><mml:mrow><mml:mi mathvariant="normal">m</mml:mi><mml:mi mathvariant="normal">i</mml:mi><mml:mi mathvariant="normal">n</mml:mi><mml:mspace width="thickmathspace" /></mml:mrow></mml:mrow></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mn>10</mml:mn><mml:mi>E</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>6</mml:mn></mml:mrow><mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mn>365</mml:mn></mml:mrow><mml:mrow><mml:mi>I</mml:mi><mml:mrow><mml:mi mathvariant="normal">t</mml:mi></mml:mrow><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>I</mml:mi><mml:mrow><mml:mi mathvariant="normal">t</mml:mi></mml:mrow><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mrow><mml:mspace width="thickmathspace" /><mml:mi mathvariant="normal">m</mml:mi><mml:mi mathvariant="normal">a</mml:mi><mml:mi mathvariant="normal">x</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mn>2.5</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:mrow></mml:mfrac></mml:math></disp-formula>
</p>
<p>For <inline-formula id="ieqn-55"><mml:math id="mml-ieqn-55"><mml:msubsup><mml:mi>S</mml:mi><mml:mi>c</mml:mi><mml:mi>t</mml:mi></mml:msubsup><mml:mo>&#x003C;</mml:mo><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>c</mml:mi><mml:mrow><mml:mspace width="thickmathspace" /><mml:mi mathvariant="normal">m</mml:mi><mml:mi mathvariant="normal">i</mml:mi><mml:mi mathvariant="normal">n</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula>, the winter season comes to an end; the squirrels lose their exploring capability and change their way of searching for food sources and positions:
<disp-formula id="eqn-29"><label>(29)</label><mml:math id="mml-eqn-29" display="block"><mml:mi>F</mml:mi><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>n</mml:mi><mml:mi>e</mml:mi><mml:mi>w</mml:mi></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mi>L</mml:mi></mml:msub></mml:mrow><mml:mo>+</mml:mo><mml:mi>L</mml:mi><mml:mrow><mml:mrow><mml:mrow><mml:mover><mml:mi>e</mml:mi><mml:mo>&#x00B4;</mml:mo></mml:mover></mml:mrow></mml:mrow></mml:mrow><mml:mi>v</mml:mi><mml:mi>y</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi mathvariant="normal">n</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x00D7;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mi>U</mml:mi></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>F</mml:mi><mml:mrow><mml:msub><mml:mi>S</mml:mi><mml:mi>L</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>where the L&#x00E9;vy distribution is a well-known mechanism applied to improve the global searching capability of optimization models:
<disp-formula id="eqn-30"><label>(30)</label><mml:math id="mml-eqn-30" display="block"><mml:mi>L</mml:mi><mml:mrow><mml:mrow><mml:mrow><mml:mover><mml:mi>e</mml:mi><mml:mo>&#x00B4;</mml:mo></mml:mover></mml:mrow></mml:mrow></mml:mrow><mml:mi>v</mml:mi><mml:mi>y</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mn>0.01</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mfrac><mml:mrow><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mi>a</mml:mi></mml:msub></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mi>&#x03C3;</mml:mi></mml:mrow><mml:mrow><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mi>b</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mi>&#x03B2;</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:mrow></mml:mfrac></mml:math></disp-formula>
<disp-formula id="eqn-31"><label>(31)</label><mml:math id="mml-eqn-31" display="block"><mml:mi>&#x03C3;</mml:mi><mml:mo>=</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mfrac><mml:mrow><mml:mi>&#x0393;</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mo>+</mml:mo><mml:mi>&#x03B2;</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x00D7;</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /><mml:mi mathvariant="normal">s</mml:mi><mml:mi mathvariant="normal">i</mml:mi><mml:mi mathvariant="normal">n</mml:mi><mml:mspace width="thickmathspace" /></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>&#x03C0;</mml:mi><mml:mi>&#x03B2;</mml:mi><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:mi>&#x0393;</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mo>+</mml:mo><mml:mi>&#x03B2;</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x00D7;</mml:mo><mml:mi>&#x03B2;</mml:mi><mml:mo>&#x00D7;</mml:mo><mml:mrow><mml:msup><mml:mn>2</mml:mn><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>&#x03B2;</mml:mi><mml:mrow><mml:mo>&#x2212;</mml:mo></mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:msup></mml:mrow></mml:mrow></mml:mfrac></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mi>&#x03B2;</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:math></disp-formula>
</p>
</sec>
<sec id="s3_4_5"><label>3.4.5</label><title>End Condition</title>
<p>The method terminates once the maximum iteration criterion is satisfied. Otherwise, the processes of developing new locations and validating the seasonal monitoring condition are repeated.</p>
</sec>
</sec>
</sec>
<sec id="s4"><label>4</label><title>Performance Validation</title>
<sec id="s4_1"><label>4.1</label><title>Implementation Setup</title>
<p>The performance of the proposed OHFE-ODNN method was tested utilizing a set of chest X-ray [<xref ref-type="bibr" rid="ref-32">32</xref>] image dataset including 220 images from COVID-19 positive patients, 27 images from normal patients and 11 images from SARS positive patients. Some of the test images are displayed in <?A3B2 "fig4",5,"anchor"?><xref ref-type="fig" rid="fig-4">Fig. 4</xref>. The parameters contained in the simulation procedure are learning rate: 0.0001, momentum: 0.9, batch size: 128 and epoch count: 140.</p>
<fig id="fig-4"><label>Figure 4</label><caption><title>(a) Covid-19 (b) normal (c) SARS</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_24312-fig-4.png"/></fig>
</sec>
<sec id="s4_2"><label>4.2</label><title>Result Analysis</title>
<p><?A3B2 "fig5",5,"anchor"?><xref ref-type="fig" rid="fig-5">Fig. 5</xref> demonstrates the confusion matrix generated by OHFE-ODNN method on the classifier of binary classes under five runs. During run 1, it is noted that OHFE-ODNN model achieved a TP of 212, TN of 23, FP of 8 and an FN of 4. In run 2, it is evident that OHFE-ODNN method accomplished a TP of 210, TN of 22, FP of 10 and an FN of 5. While at run 3, it is pointed that the proposed OHFE-ODNN approach obtained a TP of 209, TN of 21, FP of 11 and an FN of 6. During run 4, it is clear that the proposed OHFE-ODNN technique reached a TP of 212, TN of 20, FP of 8 and an FN of 7. At run 5, it is depicted that OHFE-ODNN approach attained a TP of 211, TN of 21, FP of 9 and an FN of 6.</p>
<fig id="fig-5"><label>Figure 5</label><caption><title>(a) True positive (b) True negative (c) False positive (d) False negative</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_24312-fig-5.png"/></fig>
<p><?A3B2 "fig6",5,"anchor"?><xref ref-type="fig" rid="fig-6">Fig. 6</xref> shows the binary classification outcomes of the proposed OHFE-ODNN model under varying measures and distinct number of runs. Under run 1, the proposed OHFE-ODNN technique reached a maximum sens. of 98.14&#x0025;, spec. of 74.19&#x0025;, prec. of 96.36&#x0025;, acc. of 95.14&#x0025;, F1-score of 97.25&#x0025;, and MCC of 76.81&#x0025;. Under the implementation of run 2, the presented OHFE-ODNN method accomplished an optimal sens. of 97.67&#x0025;, spec. of 68.75&#x0025;, prec. of 95.45&#x0025;, acc. of 93.93&#x0025;, F1-score of 96.55&#x0025;, and MCC of 71.49&#x0025;. At run 3, the projected OHFE-ODNN approach achieved a high sens. of 97.21&#x0025;, spec. of 65.63&#x0025;, prec. of 95&#x0025;, acc. of 93.12&#x0025;, F1-score of 96.09&#x0025;, and MCC of 67.62&#x0025;. When the experiment was conducted at run 4, the developed OHFE-ODNN approach accomplished a high sens. of 96.80&#x0025;, spec. of 93.93&#x0025;, prec. of 96.36&#x0025;, acc. of 93.93&#x0025;, F1-score of 96.58&#x0025;, and MCC of 69.33&#x0025;. Under the execution of run 5, the applied OHFE-ODNN technique obtained a better sens. of 97.24&#x0025;, spec. of 70&#x0025;, prec. of 95.91&#x0025;, acc. of 93.92&#x0025;, F1-score of 96.57&#x0025;, and MCC of 70.39&#x0025;.</p>
<fig id="fig-6"><label>Figure 6</label><caption><title>Binary classification analysis of OHFE-ODNN model with different measures</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_24312-fig-6.png"/></fig>
<p><?A3B2 "fig7",5,"anchor"?><xref ref-type="fig" rid="fig-7">Fig. 7</xref> shows the multi classification result of the proposed OHFE-ODNN approach with respect to diverse scores, under various runs. At run 1, the presented OHFE-ODNN scheme accomplished a higher sens. of 95.67&#x0025;, spec. of 87.30&#x0025;, prec. of 93.40&#x0025;, acc. of 95.90&#x0025;, F1-score of 95.36&#x0025;, and MCC of 84.56&#x0025;. Under the execution of run 2, the projected OHFE-ODNN technique achieved an optimal sens. of 96.89&#x0025;, spec. of 89.12&#x0025;, prec. of 96.80&#x0025;, acc. of 94.60&#x0025;, F1-score of 95.32&#x0025;, and MCC of 83.47&#x0025;. Under the implementation of run 3, the deployed OHFE-ODNN technology gained a high sens. of 94.50&#x0025;, spec. of 92.40&#x0025;, prec. of 95.20&#x0025;, acc. of 95.33&#x0025;, F1-score of 96.54&#x0025;, and MCC of 85.23&#x0025;. Under the execution of run 4, the deployed OHFE-ODNN framework obtained a maximal sens. of 96.82&#x0025;, spec. of 92.34&#x0025;, prec. of 96.83&#x0025;, acc. of 96.70&#x0025;, F1-score of 96.33&#x0025;, and MCC of 84.84&#x0025;. Under the representation of run 5, the implied OHFE-ODNN technique reached a better sens. of 95.30&#x0025;, spec. of 91.39&#x0025;, prec. of 95.92&#x0025;, acc. of 95.47&#x0025;, F1-score of 95.09&#x0025;, and MCC of 85.31&#x0025;.</p>
<p><?A3B2 "fig8",5,"anchor"?><xref ref-type="fig" rid="fig-8">Fig. 8</xref> illustrates the results of average analysis of OHFE-ODNN approach under different measures. The figure states that the proposed OHFE-ODNN technique performed binary classification with a sens. of 97.41&#x0025;, spec. of 74.5&#x0025;, prec. of 95.82&#x0025;, acc. of 94.01&#x0025;, F1-score of 96.61&#x0025; and MCC of 71.13&#x0025;. Similarly, OHFE-ODNN model exhibited better multiclass classification with a sens. of 95.84&#x0025;, spec. of 90.51&#x0025;, prec. of 95.63&#x0025;, acc. of 95.6&#x0025;, F1-score of 95.73&#x0025; and MCC of 84.68&#x0025;.</p>
<fig id="fig-7"><label>Figure 7</label><caption><title>Multi classification analysis of OHFE-ODNN method with varying measures</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_24312-fig-7.png"/></fig>
<fig id="fig-8"><label>Figure 8</label><caption><title>Average analysis of OHFE-ODNN model under different measures</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_24312-fig-8.png"/></fig>
</sec>
</sec>
<sec id="s5"><label>5</label><title>Conclusion</title>
<p>The current research work developed an automated COVID-19 analysis model utilizing OHFE-ODNN technique in chest x-ray images. The input images are pre-processed using MF approach. Then, the hybrid set of OGLCM and HOG features are extracted. Finally, ODNN with SSA was executed for classifying the feature set as to distinct classes in the applied X-ray chest images. OHFE is a combination of optimal GLCM and HOG features, where the optimal set of features are chosen by OCS algorithm. ODNN model includes SSA to fine tune the parameters of DNN. The experimental results validated the supremacy of the proposed OHFE-ODNN model since it gained a maximum accuracy of 94.01&#x0025; and 95.60&#x0025; on binary and multi-class classification of chest X-ray images.</p>
</sec>
</body>
<back>
<ack>
<p>The authors would like to acknowledge the support of Prince Sultan University, Riyadh, Saudi Arabia for partially supporting this project and for paying the Article Processing Charges (APC) of this publication.</p>
</ack>
<fn-group>
<fn fn-type="other"><p><bold>Funding Statement:</bold> The authors extend their appreciation to the Deanship of Scientific Research at King Khalid University for funding this work under Grant Number (RGP.1/172/42). <uri xlink:href="https://www.kku.edu.sa">www.kku.edu.sa</uri>.</p></fn>
<fn fn-type="conflict"><p><bold>Conflicts of Interest:</bold> The authors declare that they have no conflicts of interest to report regarding the present study.</p></fn>
</fn-group>
<ref-list content-type="authoryear">
<title>References</title>
<ref id="ref-1"><label>[1]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K. V.</given-names> <surname>Holmes</surname></string-name></person-group>, &#x201C;<article-title>SARS-associated coronavirus</article-title>,&#x201D; <source>New England Journal of Medicine</source>, vol. <volume>348</volume>, no. <issue>20</issue>, pp. <fpage>1948</fpage>&#x2013;<lpage>1951</lpage>, <year>May 2003</year>.</mixed-citation></ref>
<ref id="ref-2"><label>[2]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>P.</given-names> <surname>Romagnani</surname></string-name>, <string-name><given-names>G.</given-names> <surname>Gnone</surname></string-name>, <string-name><given-names>F.</given-names> <surname>Guzzi</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Negrini</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Guastalla</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>The COVID-19 infection: Lessons from the Italian experience</article-title>,&#x201D; <source>Journal of Public Health Policy</source>, vol. <volume>41</volume>, no. <issue>3</issue>, pp. <fpage>238</fpage>&#x2013;<lpage>244</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-3"><label>[3]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M. Y.</given-names> <surname>Ng</surname></string-name>, <string-name><given-names>E. Y.</given-names> <surname>Lee</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Yang</surname></string-name>, <string-name><given-names>F.</given-names> <surname>Yang</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Li</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Imaging profile of the COVID-19 infection: Radiologic findings and literature review</article-title>,&#x201D; <source>Radiology: Cardiothoracic Imaging</source>, vol. <volume>2</volume>, no. <issue>1</issue>, pp. <fpage>e200034</fpage>, <year>Feb. 2020</year>.</mixed-citation></ref>
<ref id="ref-4"><label>[4]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Yu</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Lin</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Meng</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Wei</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Guo</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Deep transfer learning for modality classification of medical images</article-title>,&#x201D; <source>Information</source>, vol. <volume>8</volume>, no. <issue>3</issue>, pp. <fpage>91</fpage>, <year>2017</year>.</mixed-citation></ref>
<ref id="ref-5"><label>[5]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>P.</given-names> <surname>Nardelli</surname></string-name>, <string-name><given-names>D. J.</given-names> <surname>Carretero</surname></string-name>, <string-name><given-names>D. B.</given-names> <surname>Pelaez</surname></string-name>, <string-name><given-names>G. R.</given-names> <surname>Washko</surname></string-name>, <string-name><given-names>F. N.</given-names> <surname>Rahaghi</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Pulmonary artery&#x2013;vein classification in ct images using deep learning</article-title>,&#x201D; <source>IEEE Transactions on Medical Imaging</source>, vol. <volume>37</volume>, no. <issue>11</issue>, pp. <fpage>2428</fpage>&#x2013;<lpage>2440</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-6"><label>[6]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>H.</given-names> <surname>Shin</surname></string-name>, <string-name><given-names>H. R.</given-names> <surname>Roth</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Gao</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Lu</surname></string-name>, <string-name><given-names>Z.</given-names> <surname>Xu</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Deep convolutional neural networks for computer-aided detection: Cnn architectures, dataset characteristics and transfer learning</article-title>,&#x201D; <source>IEEE Transactions on Medical Imaging</source>, vol. <volume>35</volume>, no. <issue>5</issue>, pp. <fpage>1285</fpage>&#x2013;<lpage>1298</lpage>, <year>2016</year>.</mixed-citation></ref>
<ref id="ref-7"><label>[7]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Xie</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Xia</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Song</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Feng</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Knowledge-based collaborative deep learning for benign-malignant lung nodule classification on chest ct</article-title>,&#x201D; <source>IEEE Transactions on Medical Imaging</source>, vol. <volume>38</volume>, no. <issue>4</issue>, pp. <fpage>991</fpage>&#x2013;<lpage>1004</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-8"><label>[8]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J. R.</given-names> <surname>Hagerty</surname></string-name>, <string-name><given-names>R. J.</given-names> <surname>Stanley</surname></string-name>, <string-name><given-names>H. A.</given-names> <surname>Almubarak</surname></string-name>, <string-name><given-names>N.</given-names> <surname>Lama</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Kasmi</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Deep learning and handcrafted method fusion: Higher diagnostic accuracy for melanoma dermoscopy images</article-title>,&#x201D; <source>IEEE Journal of Biomedical and Health Informatics</source>, vol. <volume>23</volume>, no. <issue>4</issue>, pp. <fpage>1385</fpage>&#x2013;<lpage>1391</lpage>, <year>Jul. 2019</year>.</mixed-citation></ref>
<ref id="ref-9"><label>[9]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S. E.</given-names> <surname>Gerard</surname></string-name>, <string-name><given-names>T. J.</given-names> <surname>Patton</surname></string-name>, <string-name><given-names>G. E.</given-names> <surname>Christensen</surname></string-name>, <string-name><given-names>J. E.</given-names> <surname>Bayouth</surname></string-name> and <string-name><given-names>J. M.</given-names> <surname>Reinhardt</surname></string-name></person-group>, &#x201C;<article-title>FissureNet: A deep learning approach for pulmonary fissure detection in ct images</article-title>,&#x201D; <source>IEEE Transactions on Medical Imaging</source>, vol. <volume>38</volume>, no. <issue>1</issue>, pp. <fpage>156</fpage>&#x2013;<lpage>166</lpage>, <year>Jan. 2019</year>.</mixed-citation></ref>
<ref id="ref-10"><label>[10]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. A. A.</given-names> <surname>Setio</surname></string-name>, <string-name><given-names>F.</given-names> <surname>Ciompi</surname></string-name>, <string-name><given-names>G.</given-names> <surname>Litjens</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Gerke</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Jacobs</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Pulmonary nodule detection in ct images: False positive reduction using multi-view convolutional networks</article-title>,&#x201D; <source>IEEE Transactions on Medical Imaging</source>, vol. <volume>35</volume>, no. <issue>5</issue>, pp. <fpage>1160</fpage>&#x2013;<lpage>1169</lpage>, <year>May 2016</year>.</mixed-citation></ref>
<ref id="ref-11"><label>[11]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Xia</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Yin</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Qian</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Jiang</surname></string-name> and <string-name><given-names>S.</given-names> <surname>Wang</surname></string-name></person-group>, &#x201C;<article-title>Liver semantic segmentation algorithm based on improved deep adversarial networks in combination of weighted loss function on abdominal ct images</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>7</volume>, pp. <fpage>96349</fpage>&#x2013;<lpage>96358</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-12"><label>[12]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Pezeshk</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Hamidian</surname></string-name>, <string-name><given-names>N.</given-names> <surname>Petrick</surname></string-name> and <string-name><given-names>B.</given-names> <surname>Sahiner</surname></string-name></person-group>, &#x201C;<article-title>3-D convolutional neural networks for automatic detection of pulmonary nodules in chest ct</article-title>,&#x201D; <source>IEEE Journal of Biomedical and Health Informatics</source>, vol. <volume>23</volume>, no. <issue>5</issue>, pp. <fpage>2080</fpage>&#x2013;<lpage>2090</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-13"><label>[13]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Zreik</surname></string-name>, <string-name><given-names>R. W. v.</given-names> <surname>Hamersvelt</surname></string-name>, <string-name><given-names>J. M.</given-names> <surname>Wolterink</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Leiner</surname></string-name>, <string-name><given-names>M. A.</given-names> <surname>Viergever</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>A recurrent cnn for automatic detection and classification of coronary artery plaque and stenosis in coronary ct angiography</article-title>,&#x201D; <source>IEEE Transactions on Medical Imaging</source>, vol. <volume>38</volume>, no. <issue>7</issue>, pp. <fpage>1588</fpage>&#x2013;<lpage>1598</lpage>, <year>Jul. 2019</year>.</mixed-citation></ref>
<ref id="ref-14"><label>[14]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Bhandary</surname></string-name>, <string-name><given-names>G. A.</given-names> <surname>Prabhu</surname></string-name>, <string-name><given-names>V.</given-names> <surname>Rajinikanth</surname></string-name>, <string-name><given-names>K. P.</given-names> <surname>Thanaraj</surname></string-name>, <string-name><given-names>S. C.</given-names> <surname>Satapathy</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Deep-learning framework to detect lung abnormality&#x2013;A study with chest X-ray and lung CT scan images</article-title>,&#x201D; <source>Pattern Recognition Letters</source>, vol. <volume>129</volume>, pp. <fpage>271</fpage>&#x2013;<lpage>278</lpage>, <year>Jan. 2020</year>.</mixed-citation></ref>
<ref id="ref-15"><label>[15]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>X. W.</given-names> <surname>Gao</surname></string-name>, <string-name><given-names>C. J.</given-names> <surname>Reynolds</surname></string-name> and <string-name><given-names>E.</given-names> <surname>Currie</surname></string-name></person-group>, &#x201C;<article-title>Analysis of tuberculosis severity levels from CT pulmonary images based on enhanced residual deep learning architecture</article-title>,&#x201D; <source>Neurocomputing</source>, vol. <volume>392</volume>, pp. <fpage>233</fpage>&#x2013;<lpage>244</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-16"><label>[16]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>H. S.</given-names> <surname>Pannu</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Singh</surname></string-name> and <string-name><given-names>A. K.</given-names> <surname>Malhi</surname></string-name></person-group>, &#x201C;<article-title>Improved particle swarm optimization based adaptive neuro-fuzzy inference system for benzene detection</article-title>,&#x201D; <source>Clean-Soil, Air, Water</source>, vol. <volume>46</volume>, no. <issue>5</issue>, pp. <fpage>1700162</fpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-17"><label>[17]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>X.</given-names> <surname>Zeng</surname></string-name>, <string-name><given-names>W.</given-names> <surname>Ouyang</surname></string-name>, <string-name><given-names>B.</given-names> <surname>Yang</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Yan</surname></string-name> and <string-name><given-names>X.</given-names> <surname>Wang</surname></string-name></person-group>, &#x201C;<article-title>Gated bi-directional CNN for object detection</article-title>,&#x201D; in <conf-name>European Conf. on Computer Vision</conf-name>, <conf-loc>Cham</conf-loc>, <publisher-name>Springer</publisher-name>, pp. <fpage>354</fpage>&#x2013;<lpage>369</lpage>, <year>2016</year>.</mixed-citation></ref>
<ref id="ref-18"><label>[18]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>O.</given-names> <surname>Dorgham</surname></string-name>, <string-name><given-names>B. A.</given-names> <surname>Rahamneh</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Almomani</surname></string-name>, <string-name><given-names>M. A.</given-names> <surname>Hadidi</surname></string-name> and <string-name><given-names>K. F.</given-names> <surname>Khatatneh</surname></string-name></person-group>, &#x201C;<article-title>Enhancing the security of exchanging and storing dicom medical images on the cloud</article-title>,&#x201D; <source>International Journal of Cloud Applications and Computing</source>, vol. <volume>8</volume>, no. <issue>1</issue>, pp. <fpage>154</fpage>&#x2013;<lpage>172</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-19"><label>[19]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Kumar</surname></string-name></person-group>, &#x201C;<article-title>Design of secure image fusion technique using cloud for privacy-preserving and copyright protection</article-title>,&#x201D; <source>International Journal of Cloud Applications and Computing</source>, vol. <volume>9</volume>, no. <issue>3</issue>, pp. <fpage>22</fpage>&#x2013;<lpage>36</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-20"><label>[20]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Gudivada</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Philips</surname></string-name> and <string-name><given-names>N.</given-names> <surname>Tabrizi</surname></string-name></person-group>, &#x201C;<article-title>Developing concept enriched models for big data processing within the medical domain</article-title>,&#x201D; <source>International Journal of Software Science and Computational Intelligence (IJSSCI)</source>, vol. <volume>12</volume>, no. <issue>3</issue>, pp. <fpage>55</fpage>&#x2013;<lpage>71</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-21"><label>[21]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Sarivougioukas</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Vagelatos</surname></string-name></person-group>, &#x201C;<article-title>Modeling deep learning neural networks with denotational mathematics in ubihealth environment</article-title>,&#x201D; <source>International Journal of Software Science and Computational Intelligence</source>, vol. <volume>12</volume>, no. <issue>3</issue>, pp. <fpage>14</fpage>&#x2013;<lpage>27</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-22"><label>[22]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Ghoneim</surname></string-name>, <string-name><given-names>G.</given-names> <surname>Muhammad</surname></string-name>, <string-name><given-names>S. U.</given-names> <surname>Amin</surname></string-name> and <string-name><given-names>B.</given-names> <surname>Gupta</surname></string-name></person-group>, &#x201C;<article-title>Medical image forgery detection for smart healthcare</article-title>,&#x201D; <source>IEEE Communications Magazine</source>, vol. <volume>56</volume>, no. <issue>4</issue>, pp. <fpage>33</fpage>&#x2013;<lpage>37</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-23"><label>[23]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N. E. H.</given-names> <surname>Gol&#x00E9;a</surname></string-name> and <string-name><given-names>K. E.</given-names> <surname>Melkemi</surname></string-name></person-group>, &#x201C;<article-title>ROI-Based fragile watermarking for medical image tamper detection</article-title>,&#x201D; <source>International Journal of High Performance Computing and Networking</source>, vol. <volume>13</volume>, no. <issue>2</issue>, pp. <fpage>199</fpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-24"><label>[24]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N. H.</given-names> <surname>Rajini</surname></string-name> and <string-name><given-names>R.</given-names> <surname>Bhavani</surname></string-name></person-group>, &#x201C;<article-title>Computer aided detection of ischemic stroke using segmentation and texture features</article-title>,&#x201D; <source>Measurement</source>, vol. <volume>46</volume>, no. <issue>6</issue>, pp. <fpage>1865</fpage>&#x2013;<lpage>1874</lpage>, <year>2013</year>.</mixed-citation></ref>
<ref id="ref-25"><label>[25]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>R. J. S.</given-names> <surname>Raj</surname></string-name>, <string-name><given-names>S. J.</given-names> <surname>Shobana</surname></string-name>, <string-name><given-names>I. V.</given-names> <surname>Pustokhina</surname></string-name>, <string-name><given-names>D. A.</given-names> <surname>Pustokhin</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Gupta</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Optimal feature selection-based medical image classification using deep learning model in internet of medical things</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>8</volume>, pp. <fpage>58006</fpage>&#x2013;<lpage>58017</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-26"><label>[26]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Ramesh</surname></string-name> and <string-name><given-names>D.</given-names> <surname>Vydeki</surname></string-name></person-group>, &#x201C;<article-title>Recognition and classification of paddy leaf diseases using optimized deep neural network with jaya algorithm</article-title>,&#x201D; <source>Information Processing in Agriculture</source>, vol. <volume>7</volume>, no. <issue>2</issue>, pp. <fpage>249</fpage>&#x2013;<lpage>260</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-27"><label>[27]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Shankar</surname></string-name>, <string-name><given-names>E.</given-names> <surname>Perumal</surname></string-name>, <string-name><given-names>V. G.</given-names> <surname>D&#x00ED;az</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Tiwari</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Gupta</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>An optimal cascaded recurrent neural network for intelligent COVID-19 detection using chest X-ray images</article-title>,&#x201D; <source>Applied Soft Computing</source>, vol. <volume>113</volume>, pp. <fpage>107878</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-28"><label>[28]</label><mixed-citation publication-type="book"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Shankar</surname></string-name>, <string-name><given-names>S. N.</given-names> <surname>Mohanty</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Yadav</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Gopalakrishnan</surname></string-name> and <string-name><given-names>A. M.</given-names> <surname>Elmisery</surname></string-name></person-group>, &#x201C;<chapter-title>Automated COVID-19 diagnosis and classification using convolutional neural network with fusion based feature extraction model</chapter-title>,&#x201D; in <source>Cognitive Neurodynamics</source>, <publisher-loc>US</publisher-loc>: <publisher-name>Springer</publisher-name>, <year>Sep. 2021</year>.</mixed-citation></ref>
<ref id="ref-29"><label>[29]</label><mixed-citation publication-type="book"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Shankar</surname></string-name>, <string-name><given-names>E.</given-names> <surname>Perumal</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Tiwari</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Shorfuzzaman</surname></string-name> and <string-name><given-names>D.</given-names> <surname>Gupta</surname></string-name></person-group>, &#x201C;<chapter-title>Deep learning and evolutionary intelligence with fusion-based feature extraction for detection of COVID-19 from chest X-ray images</chapter-title>,&#x201D; in <source>Multimedia Systems</source>, <publisher-loc>US</publisher-loc>: <publisher-name>Springer</publisher-name>, <year>May 2021</year>.</mixed-citation></ref>
<ref id="ref-30"><label>[30]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Shankar</surname></string-name> and <string-name><given-names>E.</given-names> <surname>Perumal</surname></string-name></person-group>, &#x201C;<article-title>A novel hand-crafted with deep learning features based fusion model for COVID-19 diagnosis and classification using chest X-ray images</article-title>,&#x201D; <source>Complex &#x0026; Intelligent Systems</source>, vol. <volume>7</volume>, no. <issue>3</issue>, pp. <fpage>1277</fpage>&#x2013;<lpage>1293</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-31"><label>[31]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>T.</given-names> <surname>Zheng</surname></string-name> and <string-name><given-names>W.</given-names> <surname>Luo</surname></string-name></person-group>, &#x201C;<article-title>An improved squirrel search algorithm for optimization</article-title>,&#x201D; <source>Complexity</source>, vol. <volume>2019</volume>, pp. <fpage>1</fpage>&#x2013;<lpage>31</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-32"><label>[32]</label><mixed-citation publication-type="web">Dataset: <uri xlink:href="https://github.com/ieee8023/covid-chestxray-dataset">https://github.com/ieee8023/covid-chestxray-dataset</uri>, <year>2021</year>.</mixed-citation></ref>
</ref-list>
</back>
</article>