<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "http://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xml:lang="en" article-type="research-article" dtd-version="1.1">
<front>
<journal-meta>
<journal-id journal-id-type="pmc">CMC</journal-id>
<journal-id journal-id-type="nlm-ta">CMC</journal-id>
<journal-id journal-id-type="publisher-id">CMC</journal-id>
<journal-title-group>
<journal-title>Computers, Materials &#x0026; Continua</journal-title>
</journal-title-group>
<issn pub-type="epub">1546-2226</issn>
<issn pub-type="ppub">1546-2218</issn>
<publisher>
<publisher-name>Tech Science Press</publisher-name>
<publisher-loc>USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">31786</article-id>
<article-id pub-id-type="doi">10.32604/cmc.2023.031786</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Article</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Symbiotic Organisms Search with Deep Learning Driven Biomedical Osteosarcoma Detection and Classification</article-title>
<alt-title alt-title-type="left-running-head">Symbiotic Organisms Search with Deep Learning Driven Biomedical Osteosarcoma Detection and Classification</alt-title>
<alt-title alt-title-type="right-running-head">Symbiotic Organisms Search with Deep Learning Driven Biomedical Osteosarcoma Detection and Classification</alt-title>
</title-group>
<contrib-group>
<contrib id="author-1" contrib-type="author">
<name name-style="western"><surname>Basahel</surname><given-names>Abdullah M.</given-names>
</name><xref ref-type="aff" rid="aff-1">1</xref></contrib>
<contrib id="author-2" contrib-type="author">
<name name-style="western"><surname>Yamin</surname><given-names>Mohammad</given-names>
</name><xref ref-type="aff" rid="aff-1">1</xref></contrib>
<contrib id="author-3" contrib-type="author">
<name name-style="western"><surname>Basahel</surname><given-names>Sulafah M.</given-names>
</name><xref ref-type="aff" rid="aff-2">2</xref></contrib>
<contrib id="author-4" contrib-type="author">
<name name-style="western"><surname>Abusurrah</surname><given-names>Mona M.</given-names>
</name><xref ref-type="aff" rid="aff-3">3</xref></contrib>
<contrib id="author-5" contrib-type="author">
<name name-style="western"><surname>Kumar</surname><given-names>K.Vijaya</given-names>
</name><xref ref-type="aff" rid="aff-4">4</xref></contrib>
<contrib id="author-6" contrib-type="author" corresp="yes">
<name name-style="western"><surname>Lydia</surname><given-names>E. Laxmi</given-names>
</name><xref ref-type="aff" rid="aff-5">5</xref><email>elaxmi2002@yahoo.com</email></contrib>
<aff id="aff-1"><label>1</label><institution>Faculty of Economics and Administration, King Abdulaziz University</institution>, <addr-line>Jeddah</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-2"><label>2</label><institution>E-Commerce Department, College of Administrative and Financial Sciences, Saudi Electronic University</institution>, <addr-line>Jeddah</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-3"><label>3</label><institution>Department of Management Information Systems, College of Business Administration, Taibah University</institution>, <addr-line>Al-Madinah</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-4"><label>4</label><institution>Department of Computer Science and Engineering, GITAM School of Technology</institution>, <addr-line>Visakhapatnam Campus, GITAM (Deemed to be a University)</addr-line>, <country>India</country></aff>
<aff id="aff-5"><label>5</label><institution>Department of Computer Science and Engineering, GMR Institute of Technology</institution>, <addr-line>Andhra Pradesh, Rajam</addr-line>, <country>India</country></aff>
</contrib-group>
<author-notes>
<corresp id="cor1"><label>&#x002A;</label>Corresponding Author: E. Laxmi Lydia. Email: <email>elaxmi2002@yahoo.com</email></corresp>
</author-notes>
<pub-date date-type="collection" publication-format="electronic"><year>2023</year></pub-date>
<pub-date date-type="pub" publication-format="electronic"><day>24</day><month>1</month><year>2023</year></pub-date>
<volume>75</volume>
<issue>1</issue>
<fpage>133</fpage>
<lpage>148</lpage>
<history>
<date date-type="received"><day>27</day><month>4</month><year>2022</year></date>
<date date-type="accepted"><day>24</day><month>6</month><year>2022</year></date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2023 Basahel et al.</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Basahel et al.</copyright-holder>
<license xlink:href="https://creativecommons.org/licenses/by/4.0/">
<license-p>This work is licensed under a <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="TSP_CMC_31786.pdf"></self-uri>
<abstract><p>Osteosarcoma is one of the rare bone cancers that affect the individuals aged between 10 and 30 and it incurs high death rate. Early diagnosis of osteosarcoma is essential to improve the survivability rate and treatment protocols. Traditional physical examination procedure is not only a time-consuming process, but it also primarily relies upon the expert&#x2019;s knowledge. In this background, the recently developed Deep Learning (DL) models can be applied to perform decision making. At the same time, hyperparameter optimization of DL models also plays an important role in influencing overall classification performance. The current study introduces a novel Symbiotic Organisms Search with Deep Learning-driven Osteosarcoma Detection and Classification (SOSDL-ODC) model. The presented SOSDL-ODC technique primarily focuses on recognition and classification of osteosarcoma using histopathological images. In order to achieve this, the presented SOSDL-ODC technique initially applies image pre-processing approach to enhance the quality of image. Also, MobileNetv2 model is applied to generate a suitable group of feature vectors whereas hyperparameter tuning of MobileNetv2 model is performed using SOS algorithm. At last, Gated Recurrent Unit (GRU) technique is applied as a classification model to determine proper class labels. In order to validate the enhanced osteosarcoma classification performance of the proposed SOSDL-ODC technique, a comprehensive comparative analysis was conducted. The obtained outcomes confirmed the betterment of SOSDL-ODC approach than the existing approaches as the former achieved a maximum accuracy of 97.73%.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>Osteosarcoma</kwd>
<kwd>medical imaging</kwd>
<kwd>deep learning</kwd>
<kwd>feature vectors</kwd>
<kwd>computer aided diagnosis</kwd>
<kwd>image classification</kwd>
</kwd-group>
</article-meta>
</front>
<body>
<sec id="s1">
<label>1</label><title>Introduction</title>
<p>Osteosarcoma or Osteogenic Sarcoma is a type of bone cancer that commonly starts developing from long bones of arms and legs [<xref ref-type="bibr" rid="ref-1">1</xref>]. Though its growth can be observed in any of the bone cells within bones, it rarely affects the soft tissues adjacent to bones. Though it has been prevalently diagnosed among teenage or young people, it also affects other age categories. Multiple reasons are cited behind the occurrence of osteosarcoma. Like other cancer types, it occurs due to genetic disorder and is not mostly connected to familial traits [<xref ref-type="bibr" rid="ref-2">2</xref>]. However, in one of the studies conducted earlier, osteosarcoma was found to have association with familial retinoblastoma cases i.e., family history is concerned with adolescent cancer in eye [<xref ref-type="bibr" rid="ref-3">3</xref>]. The symptoms of osteosarcoma disease include aches from the affected bone in the beginning while as time goes by, the low level of pain occurs and the patient&#x2019;s agony increases. Bone fracture is another important symptom since the affected bones become weak owing to infection and lose their strength [<xref ref-type="bibr" rid="ref-4">4</xref>]. Magnetic Resonance Imaging (MRI), X-rays and histological biopsy test are necessary for diagnosis of osteosarcoma. In recent times, the prognosis of osteosarcoma demands a comprehensive history of the patient and their physical checks [<xref ref-type="bibr" rid="ref-5">5</xref>]. The common indications include constant gnawing pain and swelling at the site of infection, deep-seated ache in many parts, etc., which might portend skeletal metastasis; thus, these symptoms must be properly examined [<xref ref-type="bibr" rid="ref-6">6</xref>].</p>
<p>Earlier, the histopathological images are scrutinized physically or through image processing technologies to find the clusters of nuclei, osteoblasts, development plates, fibrous tissue, blood vessels, nuclei sizes, calcified bone sections, adipocytes, cartilage, marrow cells and osteoid. This is not only a time-consuming process, but it is also prone to errors [<xref ref-type="bibr" rid="ref-7">7</xref>]. At present, advanced Artificial Intelligence (AI) techniques can be applied for cancer cell recognition using simple datasets. The features can be identified automatically within a limited period of time. Additionally, the utilization of AI offers good support for patients as well as doctors by prognosing the symptoms [<xref ref-type="bibr" rid="ref-8">8</xref>]. Besides, the utilization of AI in histopathology domain helps in achieving superior prognosis, categorization, and identification of distinct forms of cancer [<xref ref-type="bibr" rid="ref-9">9</xref>]. At present, Computer-Aided Detection (CAD) algorithms have been developed by researchers to diagnose osteosarcoma through medical images. These techniques particularly make use of Computed Tomography (CT) and MRI scan images. But the outcomes from such systems are confined owing to constraints found in CT and MRI scans [<xref ref-type="bibr" rid="ref-10">10</xref>].</p>
<p>The authors in the literature [<xref ref-type="bibr" rid="ref-11">11</xref>] executed a pipeline for automatic segmentation of Region of Interest (ROI). In this study, a nomogram was utilized combining MRI-based radiomics&#x2019; score and medicinal variables so as to predict the responses for Neo-Adjuvant Chemotherapy (NAC) among osteosarcoma patients. After normalization, ROIs are segmented by DL segmentation method applied to preoperative MRI and are trained with nnU-Net using two independent manual segmentation labels. Radiomics feature is employed to automatically extract the segmented ROIs. In literature [<xref ref-type="bibr" rid="ref-12">12</xref>], the authors presented an automatic recognition model named Integrated Features-Feature Selection Model for Classification (IF-FSM-C) which diagnoses osteosarcoma in high-resolution Whole Slide Images (WSIs). In order to perform FS, two binary variations were used for the newly-presented Arithmetic Optimization Algorithm (AOA) such as BAOA-S and BAOA-V. The chosen features were then provided for classification during when the WSI is classified as Non-Tumor (NT), Viable Tumor (VT), and Non-Viable Tumor (NVT).</p>
<p>In literature [<xref ref-type="bibr" rid="ref-13">13</xref>], a new and more important structure W-net&#x002B;&#x002B; was presented based on two cascaded U-Nets and dense skip links so as to realize automatic and correct segmentation of osteosarcoma lesion from CT scan image. In this network, multiscale input was used to structure the missing spatial details, due to several encoders and encoding subsamples. In the study conducted earlier [<xref ref-type="bibr" rid="ref-14">14</xref>], Transfer Learning (TL) approaches like pre-trained Convolutional Neural Network (CNN) were altered in public data set on osteosarcoma histological image to detect the necrotic image in non-necrotic and healthy tissues. At that time, TL techniques that contain Visual Geometry Group (VGG19) and Inception V3 were utilized and trained on WSIs without patches so as to enhance the accuracy of output. The authors in literature [<xref ref-type="bibr" rid="ref-15">15</xref>] presented CNN as a tool to enhance the efficacy and precision of osteosarcoma tumor classification under tumor classes (VT, necrosis) and non-tumor classes.</p>
<p>The current study introduces a novel Symbiotic Organisms Search with Deep Learning-driven Osteosarcoma Detection and Classification (SOSDL-ODC) model. The presented SOSDL-ODC technique applies image pre-processing approach to enhance the quality of image. In addition, MobileNetv2 model is applied to generate a suitable group of feature vectors and the hyperparameter tuning of MobileNetv2 model is performed using SOS algorithm. At last, Gated Recurrent Unit (GRU) technique is applied as a classification model to determine suitable class labels. In order to validate the enhanced osteosarcoma classification performance of the proposed SOSDL-ODC technique, the authors conducted a comprehensive comparative analysis and the results were validated.</p>
</sec>
<sec id="s2">
<label>2</label><title>Materials and Methods</title>
<p>In this work, a new SOSDL-ODC approach has been developed for recognition and classification of osteosarcoma using histopathological images. The presented SOSDL-ODC system initially applies image pre-processing approach to enhance the quality of image. After image pre-processing, SOS-MobileNetv2 model is applied to generate a suitable group of feature vectors. Finally, GRU method is applied as a classification model to determine the appropriate class labels. <xref ref-type="fig" rid="fig-1">Fig. 1</xref> depicts the overall block diagram of SOSDL-ODC technique.</p>
<fig id="fig-1">
<label>Figure 1</label>
<caption><title>Block diagram of SOSDL-ODC technique</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-1.tif"/>
</fig>
<sec id="s2_1">
<label>2.1</label><title>Image Pre-processing</title>
<p>The presented SOSDL-ODC technique initially applies image pre-processing approach to enhance the quality of image. Wiener Filter (WF) is utilized in this study to filter noisy signals. This is achieved by employing the spectral feature of the chosen signals whereas the noise present in the signals is assumed to undergo stochastic procedures with linear properties.</p>
<p>Linear filter is executed with a co-efficient <inline-formula id="ieqn-1"><mml:math id="mml-ieqn-1"><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>K</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> on the evaluated signals. An input signal, <inline-formula id="ieqn-2"><mml:math id="mml-ieqn-2"><mml:mi>x</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> contains noises as follows <inline-formula id="ieqn-3"><mml:math id="mml-ieqn-3"><mml:mi>v</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>,</mml:mo></mml:math></inline-formula></p>
<p><disp-formula id="eqn-1"><label>(1)</label><mml:math id="mml-eqn-1" display="block"><mml:mi>x</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mtext>n</mml:mtext></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mi>d</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>+</mml:mo><mml:mi>v</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p>The resultant signal <inline-formula id="ieqn-4"><mml:math id="mml-ieqn-4"><mml:mi>y</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> is a close evaluation of <inline-formula id="ieqn-5"><mml:math id="mml-ieqn-5"><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula>. Therefore, the error signal <inline-formula id="ieqn-6"><mml:math id="mml-ieqn-6"><mml:mi>e</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> is lesser. The adaptive method attempts at correcting the weighted <inline-formula id="ieqn-7"><mml:math id="mml-ieqn-7"><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>K</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, and so the Mean Square Error (MSE) value is minimized.</p>
<p><disp-formula id="eqn-2"><label>(2)</label><mml:math id="mml-eqn-2" display="block"><mml:mi>e</mml:mi><mml:mo>=</mml:mo><mml:mo movablelimits="true" form="prefix">min</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>E</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>e</mml:mi><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mi>n</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula>whereas,</p>
<p><disp-formula id="eqn-3"><label>(3)</label><mml:math id="mml-eqn-3" display="block"><mml:mi>e</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>n</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mtext>y</mml:mtext></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>n</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>d</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p>A <inline-formula id="ieqn-8"><mml:math id="mml-ieqn-8"><mml:mi>k</mml:mi></mml:math></inline-formula> tap discrete WF utilizes the subsequent formula to determine the value of y (n)</p>
<p><disp-formula id="eqn-4"><label>(4)</label><mml:math id="mml-eqn-4" display="block"><mml:mi>y</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>n</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi>k</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">(</mml:mo><mml:mi>d</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>k</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2217;</mml:mo><mml:mi>v</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>k</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p>Wiener-Hopf formula that computes the optimum weight is one of the important features of WFs.</p>
<p><disp-formula id="eqn-5"><label>(5)</label><mml:math id="mml-eqn-5" display="block"><mml:msubsup><mml:mi>E</mml:mi><mml:mrow><mml:mi>l</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>p</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>&#x03B9;</mml:mi></mml:mrow></mml:msubsup><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>l</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>x</mml:mi><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>k</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>x</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">(</mml:mo><mml:mo>&#x2212;</mml:mo><mml:mi>l</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p>Here, <inline-formula id="ieqn-9"><mml:math id="mml-ieqn-9"><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mn>00</mml:mn></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mn>01</mml:mn></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:mo>&#x22EF;</mml:mo><mml:mo>&#x22EF;</mml:mo><mml:mo>&#x22C5;</mml:mo><mml:mo>&#x22C5;</mml:mo><mml:mo>,</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>o</mml:mi><mml:mi>p</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:math></inline-formula> denotes the optimal value of tap weighted filters and <inline-formula id="ieqn-10"><mml:math id="mml-ieqn-10"><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>x</mml:mi><mml:mi>x</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> signifies the autocorrelation function of x(n) and <inline-formula id="ieqn-11"><mml:math id="mml-ieqn-11"><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>x</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> denotes the cross correlation function between <inline-formula id="ieqn-12"><mml:math id="mml-ieqn-12"><mml:mi>x</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> and <inline-formula id="ieqn-13"><mml:math id="mml-ieqn-13"><mml:mi>d</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula>.</p>
</sec>
<sec id="s2_2">
<label>2.2</label><title>Feature Extraction: MobileNetv2 Model</title>
<p>After image pre-processing, MobileNetv2 technique is leveraged to generate a suitable set of feature vectors [<xref ref-type="bibr" rid="ref-16">16</xref>]. MobileNetV1 is developed based on conventional VGG structure and is used to construct a system by stacking the convolutional layers so as to enhance the performance. But, gradient vanishing problem arises after stacking multiple convolution layers. The residual blocks present in ResNet make it easy for the dataset to stream among the layers. This process gets rid of the gradient exploring problems in backpropagation while the features are reused in forwarding propagation. In addition to continuing with in-depth separable convolutional layers of MobileNetV1, the MobileNetV2 technique draws on ResNet architecture too. In comparison with MobileNetV1, the major development found in MobileNetV2 is that it comprises of two points such as the accomplishment of inverted residual block and linear bottleneck in the networks. A certain conceptual framework feature is attained from the expansion factor t in N to M networks with stride, s. This bottleneck includes a 1 &#x00D7; 1 convolutional layer before depth-wise convolution layers and employs linear activation instead of non-linear activation after point-wise convolution layers. Further, it determines the down-sampling process by setting the variable, &#x2018;s&#x2019; in depth-wise convolution layers.</p>
<p>In the entire architecture of MobileNetV2, conv2d refers to a typical convolution, avgpool indicates the average pooling, <inline-formula id="ieqn-14"><mml:math id="mml-ieqn-14"><mml:mi>c</mml:mi></mml:math></inline-formula> represents the amount of output networks and &#x2018;n&#x2019; indicates the recurrent time. On the whole, the network comprises of 19 layers whereas the middle layer is used for the extraction of features. Further, the final layer is utilized for classification. According to transfer learning model, the current study initially employed &#x2018;MobileNetV2 pre-trained model&#x2019;, using ImageNet, as a feature extractor and trained two layers which are later fine-tuned with a few layers rather than each trainable layer.</p>
</sec>
<sec id="s2_3">
<label>2.3</label><title>Hyperparameter Optimization: SOS Algorithm</title>
<p>In this work, the hyperparameter involved [<xref ref-type="bibr" rid="ref-17">17</xref>&#x2013;<xref ref-type="bibr" rid="ref-19">19</xref>] in MobileNetv2 model are fine-tuned using SOS algorithm [<xref ref-type="bibr" rid="ref-20">20</xref>]. Being an advanced method, SOS algorithm is used to resolve the optimization problems according to the interaction among organisms in nature. In general, organisms hardly live in isolation because each organism is dependent upon another for both survival and nutrition. These relationships mainly exist on the basis of trust. In other terms, this relationship is called symbiotic relationship. SOS approach initiates in the presence of ecosystem with initial population. At first, a collection of organisms is arbitrarily created. These arbitrarily-created organisms denote the solution for the presented problem. SOS approach yields a novel solution based on stimulation of living relations between two ecosystems. The pseudocode of the SOS algorithm includes parasitism, mutualism, and commensalism of natural biological relations.</p>
<p><italic>Mutualism</italic></p>
<p>In this type of relationship, both the organisms benefit from each other. For instance, assume the relationship between honeybees and flowers. The bees fly over flowers to collect the nectar needed for honey production. On the other hand, it is advantageous for the flowers since bees scatter the pollen and simplify the pollination process. During SOS process, <inline-formula id="ieqn-15"><mml:math id="mml-ieqn-15"><mml:mi>X</mml:mi><mml:mi>i</mml:mi></mml:math></inline-formula> indicates the organism which is equivalent to <inline-formula id="ieqn-16"><mml:math id="mml-ieqn-16"><mml:mi>i</mml:mi></mml:math></inline-formula>-<inline-formula id="ieqn-17"><mml:math id="mml-ieqn-17"><mml:mi>t</mml:mi><mml:mi>h</mml:mi></mml:math></inline-formula> individual. The <inline-formula id="ieqn-18"><mml:math id="mml-ieqn-18"><mml:mi>X</mml:mi><mml:mi>j</mml:mi></mml:math></inline-formula> second organism is chosen arbitrarily and related to Xi. In conclusion, Xi and Xj are upgraded in Mutualism stage as follows:</p>
<p><disp-formula id="eqn-6"><label>(6)</label><mml:math id="mml-eqn-6" display="block"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>e</mml:mi><mml:mi>w</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x00D7;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mi mathvariant="italic">M</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">a</mml:mi></mml:mrow><mml:msub><mml:mn>1</mml:mn><mml:mrow><mml:mo>&#x2212;</mml:mo></mml:mrow></mml:msub><mml:mrow><mml:mi mathvariant="italic">V</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mi>B</mml:mi><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p><disp-formula id="eqn-7"><label>(7)</label><mml:math id="mml-eqn-7" display="block"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mi>n</mml:mi><mml:mi>e</mml:mi><mml:mi>w</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x00D7;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mi mathvariant="italic">M</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">a</mml:mi></mml:mrow><mml:msub><mml:mn>1</mml:mn><mml:mrow><mml:mo>&#x2212;</mml:mo></mml:mrow></mml:msub><mml:mrow><mml:mi mathvariant="italic">V</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mi>B</mml:mi><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p><disp-formula id="eqn-8"><label>(8)</label><mml:math id="mml-eqn-8" display="block"><mml:mrow><mml:mi mathvariant="italic">M</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">a</mml:mi></mml:mrow><mml:msub><mml:mn>1</mml:mn><mml:mrow><mml:mo>&#x2212;</mml:mo></mml:mrow></mml:msub><mml:mrow><mml:mi mathvariant="italic">V</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi></mml:mrow><mml:mo>=</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:math></disp-formula></p>
<p>In the above equation, <inline-formula id="ieqn-19"><mml:math id="mml-ieqn-19"><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> denotes an arbitrary number of vectors. BF<sub>1</sub> and BF<sub>2</sub> indicate the profit factors of <inline-formula id="ieqn-20"><mml:math id="mml-ieqn-20"><mml:mi>X</mml:mi><mml:mi>i</mml:mi></mml:math></inline-formula> and <inline-formula id="ieqn-21"><mml:math id="mml-ieqn-21"><mml:mi>X</mml:mi><mml:mi>j</mml:mi></mml:math></inline-formula> that demonstrate the profit of every organism. In <xref ref-type="disp-formula" rid="eqn-6">Eq. (6)</xref>, <inline-formula id="ieqn-22"><mml:math id="mml-ieqn-22"><mml:mrow><mml:mi mathvariant="italic">M</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">a</mml:mi></mml:mrow><mml:mrow><mml:mtext>l</mml:mtext></mml:mrow><mml:mi mathvariant="normal">&#x005F;</mml:mi><mml:mrow><mml:mi mathvariant="italic">V</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi></mml:mrow></mml:math></inline-formula> characterizes the relationships between Xi and <inline-formula id="ieqn-23"><mml:math id="mml-ieqn-23"><mml:mi>X</mml:mi><mml:mi>j</mml:mi></mml:math></inline-formula>. 
The <inline-formula id="ieqn-24"><mml:math id="mml-ieqn-24"><mml:mrow><mml:mi mathvariant="italic">M</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">a</mml:mi></mml:mrow><mml:mrow><mml:mtext>l</mml:mtext></mml:mrow><mml:mi mathvariant="normal">&#x005F;</mml:mi><mml:mrow><mml:mi mathvariant="italic">V</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">o</mml:mi></mml:mrow><mml:msup><mml:mi>r</mml:mi><mml:mrow><mml:mo>&#x2217;</mml:mo></mml:mrow></mml:msup><mml:mi>B</mml:mi><mml:mi>F</mml:mi><mml:mn>2</mml:mn></mml:math></inline-formula> in <xref ref-type="disp-formula" rid="eqn-6">Eqs. (6)</xref> and <xref ref-type="disp-formula" rid="eqn-7">(7)</xref> is the effort taken to increase the survival rate of living organisms. As per Darwinian concept, i.e., survival of the fittest, each organism has to raise its compatibility with its surroundings. Now, <inline-formula id="ieqn-25"><mml:math id="mml-ieqn-25"><mml:mrow><mml:mi mathvariant="italic">X</mml:mi><mml:mi mathvariant="italic">b</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">t</mml:mi></mml:mrow></mml:math></inline-formula> characterizes the maximum phase of compatibility.</p>
<p><italic>Commensalism</italic></p>
<p>Here, one of the organisms gains benefits without harming the other organism in the relationship. For instance, in the relationship between sharks and sticky fish, the sticky fish sticks around the shark and feeds on residual foods; the shark gains small or no benefits. In this Commensalism process, the <inline-formula id="ieqn-26"><mml:math id="mml-ieqn-26"><mml:mi>X</mml:mi><mml:mi>j</mml:mi></mml:math></inline-formula> organism is arbitrarily chosen and is related to Xi. Here, Xi strives to obtain the benefits while <inline-formula id="ieqn-27"><mml:math id="mml-ieqn-27"><mml:mi>X</mml:mi><mml:mi>j</mml:mi></mml:math></inline-formula> gets no benefits or losses. Therefore, Xi is upgraded as follows.</p>
<p><disp-formula id="eqn-9"><label>(9)</label><mml:math id="mml-eqn-9" display="block"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>e</mml:mi><mml:mi>w</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x00D7;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p>In <xref ref-type="disp-formula" rid="eqn-9">Eq. (9)</xref>, <inline-formula id="ieqn-28"><mml:math id="mml-ieqn-28"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> indicates the benefit given when using <inline-formula id="ieqn-29"><mml:math id="mml-ieqn-29"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> for increased survival of <inline-formula id="ieqn-30"><mml:math id="mml-ieqn-30"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>.</mml:mo></mml:math></inline-formula></p>
<p><italic>Parasitism</italic></p>
<p>In this relationship type, one organism gains the benefit out of the other organism while the second one faces loss. For instance, malarial blood parasites attack the human body and proliferate which, at times, cause death also. In SOS process, Xi, viz., malaria mosquito generates an artificial parasite named &#x2018;Parasite-Vector&#x2019;. It is generated in the searching region with the replication of <inline-formula id="ieqn-31"><mml:math id="mml-ieqn-31"><mml:mi>X</mml:mi><mml:mi>i</mml:mi></mml:math></inline-formula>. Here, <inline-formula id="ieqn-32"><mml:math id="mml-ieqn-32"><mml:mi>X</mml:mi><mml:mi>j</mml:mi></mml:math></inline-formula> is arbitrarily chosen using the ecosystem and the parasite serves as its host. Parasite-Vector tries to capture the position of Xj in the ecosystem. Both Xi and <inline-formula id="ieqn-33"><mml:math id="mml-ieqn-33"><mml:mi>X</mml:mi><mml:mi>j</mml:mi></mml:math></inline-formula> are estimated to determine the capability. Once parasite-vector demolishes <inline-formula id="ieqn-34"><mml:math id="mml-ieqn-34"><mml:mi>X</mml:mi><mml:mi>j</mml:mi></mml:math></inline-formula> and takes its position in the ecosystem, it accomplishes the maximum capability. The maximum capability for <inline-formula id="ieqn-35"><mml:math id="mml-ieqn-35"><mml:mi>X</mml:mi><mml:mi>j</mml:mi></mml:math></inline-formula> can be accomplished once it resists the parasite, and the parasite cannot live further in the ecosystem.</p>
<fig id="fig-13">
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-13.tif"/>
</fig>
</sec>
<sec id="s2_4">
<label>2.4</label><title>Image Classification: GRU Model</title>
<p>In this final stage, GRU technique is executed as a classification model to determine suitable class labels [<xref ref-type="bibr" rid="ref-21">21</xref>]. GRU is an efficient memory cell that can be used for different kinds of applications. It is viewed as an improvement of Long Short Term Memory (LSTM) and its performance can be compared and generalized. In order to define a GRU evidently, the researchers proposed LSTM. In Recurrent Neural Network (RNN), hidden unit is a major element since it is accountable for forgetting or remembering certain datasets. LSTM, presented by Hochreiter and Schmidhuber, is an effective enforcement and has several alternatives to its credit. The connection relationship is shown in the following equation.</p>
<p><disp-formula id="eqn-10"><label>(10)</label><mml:math id="mml-eqn-10" display="block"><mml:mrow><mml:mrow><mml:mo>{</mml:mo> <mml:mtable columnalign='left'><mml:mtr><mml:mtd><mml:msub><mml:mi>f</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>x</mml:mi><mml:mi>f</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>h</mml:mi><mml:mi>f</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>c</mml:mi><mml:mi>f</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mtd></mml:mtr>
<mml:mtr><mml:mtd><mml:msub><mml:mi>i</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>c</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mtd></mml:mtr>
<mml:mtr><mml:mtd><mml:mtext>&#x2009;&#x2009;</mml:mtext><mml:msub><mml:mi>C</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mi>f</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>&#x2299;</mml:mo><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>i</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>&#x2299;</mml:mo><mml:mi>tanh</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>x</mml:mi><mml:mi>c</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>h</mml:mi><mml:mi>c</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mtd></mml:mtr>
<mml:mtr><mml:mtd><mml:mtext>&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;</mml:mtext><mml:msub><mml:mi>o</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>x</mml:mi><mml:mi>o</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>h</mml:mi><mml:mi>o</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>c</mml:mi><mml:mi>o</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mtd></mml:mtr>
<mml:mtr><mml:mtd><mml:mtext>&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;</mml:mtext><mml:msub><mml:mi>h</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mi>o</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>&#x2299;</mml:mo><mml:mi>tanh</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mi>C</mml:mi><mml:mi>t</mml:mi></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mtext>&#x2009;&#x2009;</mml:mtext></mml:mtd></mml:mtr></mml:mtable></mml:mrow></mml:mrow></mml:math></disp-formula></p>
<p>In this formula, <inline-formula id="ieqn-36"><mml:math id="mml-ieqn-36"><mml:mi>C</mml:mi></mml:math></inline-formula> represents the cell state, <inline-formula id="ieqn-37"><mml:math id="mml-ieqn-37"><mml:mi>x</mml:mi></mml:math></inline-formula> indicates the input vector <inline-formula id="ieqn-38"><mml:math id="mml-ieqn-38"><mml:mrow><mml:mtext>and&#xA0;</mml:mtext></mml:mrow><mml:mi>h</mml:mi></mml:math></inline-formula> denotes the resultant vector. Here, <inline-formula id="ieqn-39"><mml:math id="mml-ieqn-39"><mml:mi>t</mml:mi></mml:math></inline-formula> characterizes the existing time and <inline-formula id="ieqn-40"><mml:math id="mml-ieqn-40"><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:math></inline-formula> indicates the previous time. <inline-formula id="ieqn-41"><mml:math id="mml-ieqn-41"><mml:mi>&#x03C3;</mml:mi></mml:math></inline-formula> indicates a sigmoid function, <inline-formula id="ieqn-42"><mml:math id="mml-ieqn-42"><mml:mo>&#x2299;</mml:mo></mml:math></inline-formula> implies Hadamard product and <inline-formula id="ieqn-43"><mml:math id="mml-ieqn-43"><mml:mi>W</mml:mi></mml:math></inline-formula> embodies an uncertain variable. <inline-formula id="ieqn-44"><mml:math id="mml-ieqn-44"><mml:mi>f</mml:mi></mml:math></inline-formula> signifies the forget gate that decides what data needs to be removed from the cell state. <inline-formula id="ieqn-45"><mml:math id="mml-ieqn-45"><mml:mi>i</mml:mi></mml:math></inline-formula> denotes the input gate that decides which dataset needs to be saved in the cell states. <inline-formula id="ieqn-46"><mml:math id="mml-ieqn-46"><mml:mi>o</mml:mi></mml:math></inline-formula> indicates the output gate which decides the dataset that should be given to output. In contrast to LSTM, GRU involves certain simplification processes. The connection relationship is demonstrated herewith.
<disp-formula id="eqn-11"><label>(11)</label><mml:math id="mml-eqn-11" display="block"><mml:mrow><mml:mrow><mml:mo>{</mml:mo> <mml:mtable columnalign='left'><mml:mtr><mml:mtd><mml:msub><mml:mi>r</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mi>r</mml:mi></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>U</mml:mi><mml:mi>r</mml:mi></mml:msub><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:msub><mml:mi>z</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mi>z</mml:mi></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>U</mml:mi><mml:mi>z</mml:mi></mml:msub><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:msub><mml:mover 
accent='true'><mml:mi>h</mml:mi><mml:mo>&#x02DC;</mml:mo></mml:mover><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mi>tanh</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mi>h</mml:mi></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:mi>U</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>&#x2299;</mml:mo><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mtext>&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;&#x2009;</mml:mtext><mml:msub><mml:mi>h</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>z</mml:mi><mml:mi>t</mml:mi></mml:msub></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>z</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:msub><mml:mover accent='true'><mml:mi>h</mml:mi><mml:mo>&#x02DC;</mml:mo></mml:mover><mml:mi>t</mml:mi></mml:msub></mml:mtd></mml:mtr></mml:mtable></mml:mrow></mml:mrow></mml:math></disp-formula></p>
<p>From the equation given above, <inline-formula id="ieqn-47"><mml:math id="mml-ieqn-47"><mml:mi>h</mml:mi></mml:math></inline-formula> symbolizes the output vector, <inline-formula id="ieqn-48"><mml:math id="mml-ieqn-48"><mml:mrow><mml:mover><mml:mi>h</mml:mi><mml:mo stretchy="false">&#x007E;</mml:mo></mml:mover></mml:mrow></mml:math></inline-formula> represents the candidate output and <inline-formula id="ieqn-49"><mml:math id="mml-ieqn-49"><mml:mi>x</mml:mi></mml:math></inline-formula> indicates the input vector. Here, <inline-formula id="ieqn-50"><mml:math id="mml-ieqn-50"><mml:mi>z</mml:mi></mml:math></inline-formula> represents the update gate and <inline-formula id="ieqn-51"><mml:math id="mml-ieqn-51"><mml:mi>r</mml:mi></mml:math></inline-formula> denotes the reset gate of GRU. Hence, GRU is simple when compared to LSTM and has fewer parameters. However, it provides great benefits in terms of convergence and performance. GRU yielded high benefits in the experiments. <xref ref-type="fig" rid="fig-2">Fig. 2</xref> illustrates the structure of GRU method.</p>
<fig id="fig-2">
<label>Figure 2</label>
<caption><title>Structure of GRU</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-2.tif"/>
</fig>
</sec>
</sec>
<sec id="s3">
<label>3</label><title>Results and Discussion</title>
<p>The performance of the proposed SOSDL-ODC method was validated using a dataset [<xref ref-type="bibr" rid="ref-22">22</xref>] that comprises 1,144 images under three classes. <xref ref-type="table" rid="table-1">Table 1</xref> shows the details of the dataset considered for the study. Some of the sample images are illustrated in <xref ref-type="fig" rid="fig-3">Fig. 3</xref>.</p>
<table-wrap id="table-1">
<label>Table 1</label>
<caption><title>Dataset details</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Class name</th>
<th>No. of images</th>
</tr>
</thead>
<tbody>
<tr>
<td>Viable tumor (VT)</td>
<td>345</td>
</tr>
<tr>
<td>Non-viable tumor (NVT)</td>
<td>263</td>
</tr>
<tr>
<td>Non-tumor (NT)</td>
<td>536</td>
</tr>
<tr>
<td>Total</td>
<td>1144</td>
</tr>
</tbody>
</table>
</table-wrap><fig id="fig-3">
<label>Figure 3</label>
<caption><title>Sample images</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-3.tif"/>
</fig>
<p><xref ref-type="fig" rid="fig-4">Fig. 4</xref> displays a set of confusion matrices generated by the proposed SOSDL-ODC method on the applied dataset under distinct number of epochs. The figures imply that the proposed SOSDL-ODC method achieved high performance under every epoch. For instance, with 200 epochs, the proposed SOSDL-ODC method classified 310 samples under VT, 241 samples under NVT, and 513 samples under NT class. Along with that, with 400 epochs, the proposed SOSDL-ODC approach categorized 313 samples under VT, 241 samples under NVT, and 512 samples under NT classes. Followed by, with 600 epochs, SOSDL-ODC algorithm identified 327 samples under VT, 252 samples under NVT, and 525 samples under NT classes. In line with this, with 800 epochs, the proposed SOSDL-ODC system identified 327 samples under VT, 252 samples under NVT, and 526 samples under NT classes.</p>
<fig id="fig-4">
<label>Figure 4</label>
<caption><title>Confusion matrices generated by SOSDL-ODC technique at (a) epoch-200, (b) epoch-400, (c)&#x00A0;epoch-600, and (d) epoch-800</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-4a.tif"/><graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-4b.tif"/>
</fig>
<p><xref ref-type="table" rid="table-2">Table 2</xref> and <xref ref-type="fig" rid="fig-5">Fig. 5</xref> show the analysis results accomplished by SOSDL-ODC method under distinct number of classes and epochs. The experimental outcomes imply that the proposed SOSDL-ODC method has the ability to attain effectual outcomes under all classes and epochs. For instance, with 200 epochs, SOSDL-ODC methodology gained average <inline-formula id="ieqn-52"><mml:math id="mml-ieqn-52"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-53"><mml:math id="mml-ieqn-53"><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-54"><mml:math id="mml-ieqn-54"><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-55"><mml:math id="mml-ieqn-55"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mi mathvariant="italic">e</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:math></inline-formula>, and MCC values such as 95.34%, 92.71%, 92.40%, 92.55%, and 88.93% respectively. 
Eventually, with 400 epochs, the proposed SOSDL-ODC system reached average <inline-formula id="ieqn-56"><mml:math id="mml-ieqn-56"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-57"><mml:math id="mml-ieqn-57"><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-58"><mml:math id="mml-ieqn-58"><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-59"><mml:math id="mml-ieqn-59"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mi mathvariant="italic">e</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:math></inline-formula>, and MCC values such as 95.45%, 92.96%, 92.63%, 92.79%, and 89.25% correspondingly. 
Meanwhile, with 600 epochs, the presented SOSDL-ODC methodology gained average <inline-formula id="ieqn-60"><mml:math id="mml-ieqn-60"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-61"><mml:math id="mml-ieqn-61"><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-62"><mml:math id="mml-ieqn-62"><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-63"><mml:math id="mml-ieqn-63"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mi mathvariant="italic">e</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:math></inline-formula>, and MCC values such as 97.67%, 96.32%, 96.18%, 96.25%, and 94.45% correspondingly. 
At last, with 800 epochs, the proposed SOSDL-ODC system reached average <inline-formula id="ieqn-64"><mml:math id="mml-ieqn-64"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-65"><mml:math id="mml-ieqn-65"><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-66"><mml:math id="mml-ieqn-66"><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, <inline-formula id="ieqn-67"><mml:math id="mml-ieqn-67"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mi mathvariant="italic">e</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:math></inline-formula>, and MCC values such as 97.73%, 96.42%, 96.24%, 96.33%, and 94.58% correspondingly.</p>
<table-wrap id="table-2">
<label>Table 2</label>
<caption><title>Results of the analysis of SOSDL-ODC technique under different measures and epochs</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Measures</th>
<th>Class name</th>
<th>Epoch-200</th>
<th>Epoch-400</th>
<th>Epoch-600</th>
<th>Epoch-800</th>
</tr>
</thead>
<tbody>
<tr>
<td>Accuracy</td>
<td>Viable tumor</td>
<td>94.76</td>
<td>94.84</td>
<td>97.38</td>
<td>97.47</td>
</tr>
<tr>
<td/>
<td>Non-viable tumor</td>
<td>96.15</td>
<td>96.42</td>
<td>97.99</td>
<td>97.99</td>
</tr>
<tr>
<td/>
<td>Non-tumor</td>
<td>95.10</td>
<td>95.10</td>
<td>97.64</td>
<td>97.73</td>
</tr>
<tr>
<td/>
<td>Average</td>
<td>95.34</td>
<td>95.45</td>
<td>97.67</td>
<td>97.73</td>
</tr>
<tr>
<td>Precision</td>
<td>Viable tumor</td>
<td>92.54</td>
<td>92.06</td>
<td>96.46</td>
<td>96.75</td>
</tr>
<tr>
<td/>
<td>Non-viable tumor</td>
<td>91.63</td>
<td>92.69</td>
<td>95.45</td>
<td>95.45</td>
</tr>
<tr>
<td/>
<td>Non-tumor</td>
<td>93.96</td>
<td>94.12</td>
<td>97.04</td>
<td>97.05</td>
</tr>
<tr>
<td/>
<td>Average</td>
<td>92.71</td>
<td>92.96</td>
<td>96.32</td>
<td>96.42</td>
</tr>
<tr>
<td>Recall</td>
<td>Viable tumor</td>
<td>89.86</td>
<td>90.72</td>
<td>94.78</td>
<td>94.78</td>
</tr>
<tr>
<td/>
<td>Non-viable tumor</td>
<td>91.63</td>
<td>91.63</td>
<td>95.82</td>
<td>95.82</td>
</tr>
<tr>
<td/>
<td>Non-tumor</td>
<td>95.71</td>
<td>95.52</td>
<td>97.95</td>
<td>98.13</td>
</tr>
<tr>
<td/>
<td>Average</td>
<td>92.40</td>
<td>92.63</td>
<td>96.18</td>
<td>96.24</td>
</tr>
<tr>
<td>F-Score</td>
<td>Viable tumor</td>
<td>91.18</td>
<td>91.39</td>
<td>95.61</td>
<td>95.75</td>
</tr>
<tr>
<td/>
<td>Non-viable tumor</td>
<td>91.63</td>
<td>92.16</td>
<td>95.64</td>
<td>95.64</td>
</tr>
<tr>
<td/>
<td>Non-tumor</td>
<td>94.82</td>
<td>94.81</td>
<td>97.49</td>
<td>97.59</td>
</tr>
<tr>
<td/>
<td>Average</td>
<td>92.55</td>
<td>92.79</td>
<td>96.25</td>
<td>96.33</td>
</tr>
<tr>
<td>MCC</td>
<td>Viable tumor</td>
<td>87.47</td>
<td>87.71</td>
<td>93.75</td>
<td>93.96</td>
</tr>
<tr>
<td/>
<td>Non-viable tumor</td>
<td>89.14</td>
<td>89.84</td>
<td>94.33</td>
<td>94.33</td>
</tr>
<tr>
<td/>
<td>Non-tumor</td>
<td>90.20</td>
<td>90.19</td>
<td>95.27</td>
<td>95.44</td>
</tr>
<tr>
<td/>
<td>Average</td>
<td>88.93</td>
<td>89.25</td>
<td>94.45</td>
<td>94.58</td>
</tr>
</tbody>
</table>
</table-wrap><fig id="fig-5">
<label>Figure 5</label>
<caption><title>Average analysis results of SOSDL-ODC technique at (a) epoch-200, (b) epoch-400, (c) epoch-600, and (d) epoch-800</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-5.tif"/>
</fig>
<p>Both Training Accuracy (TA) and Validation Accuracy (VA) values, attained by the proposed SOSDL-ODC approach on test dataset, are demonstrated in <xref ref-type="fig" rid="fig-6">Fig. 6</xref>. The experimental outcomes imply that the proposed SOSDL-ODC method gained the maximum TA and VA values. To be specific, VA is superior to TA.</p>
<fig id="fig-6">
<label>Figure 6</label>
<caption><title>TA and VA analyses results of SOSDL-ODC technique</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-6.tif"/>
</fig>
<p>Both Training Loss (TL) and Validation Loss (VL) values, achieved by SOSDL-ODC methodology on test dataset, are shown in <xref ref-type="fig" rid="fig-7">Fig. 7</xref>. The experimental outcomes infer that the proposed SOSDL-ODC system achieved the least TL and VL values. To be specific, VL seemed to be lower than TL.</p>
<fig id="fig-7">
<label>Figure 7</label>
<caption><title>TL and VL analyses results of SOSDL-ODC technique</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-7.tif"/>
</fig>
<p>A brief precision-recall inspection was conducted upon SOSDL-ODC method on test dataset and the results are depicted in <xref ref-type="fig" rid="fig-8">Fig. 8</xref>. By observing the figure, it can be inferred that the proposed SOSDL-ODC model accomplished high precision-recall performance under all the classes.</p>
<fig id="fig-8">
<label>Figure 8</label>
<caption><title>Precision-recall curve analysis results of SOSDL-ODC technique</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-8.tif"/>
</fig>
<p>A detailed ROC investigation was conducted upon SOSDL-ODC approach on test dataset and the results are showcased in <xref ref-type="fig" rid="fig-9">Fig. 9</xref>. The results indicate that the proposed SOSDL-ODC algorithm exhibited its ability to categorize three different classes such as VT, NVT, and NT on test dataset. In order to validate the enhanced performance of SOSDL-ODC method, a detailed comparative analysis was conducted and the results are shown in <xref ref-type="table" rid="table-3">Table 3</xref> [<xref ref-type="bibr" rid="ref-23">23</xref>,<xref ref-type="bibr" rid="ref-24">24</xref>].</p>
<fig id="fig-9">
<label>Figure 9</label>
<caption><title>ROC curve analysis results of SOSDL-ODC technique</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-9.tif"/>
</fig><table-wrap id="table-3">
<label>Table 3</label>
<caption><title>Comparative analysis results of SOSDL-ODC technique and other existing methodologies</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Methods</th>
<th>Precision</th>
<th>Recall</th>
<th>Accuracy</th>
</tr>
</thead>
<tbody>
<tr>
<td>VGG16</td>
<td>79.19</td>
<td>74.48</td>
<td>80.90</td>
</tr>
<tr>
<td>LeNet</td>
<td>74.62</td>
<td>67.59</td>
<td>68.55</td>
</tr>
<tr>
<td>ResNet50</td>
<td>77.98</td>
<td>79.36</td>
<td>76.52</td>
</tr>
<tr>
<td>AlexNet</td>
<td>81.30</td>
<td>75.18</td>
<td>73.93</td>
</tr>
<tr>
<td>Sequential RNN</td>
<td>89.08</td>
<td>90.49</td>
<td>88.77</td>
</tr>
<tr>
<td>Non-regularized CNN</td>
<td>88.67</td>
<td>92.55</td>
<td>90.45</td>
</tr>
<tr>
<td>Regularized CNN</td>
<td>90.53</td>
<td>93.99</td>
<td>91.52</td>
</tr>
<tr>
<td>SOSDL-ODC</td>
<td>96.42</td>
<td>96.24</td>
<td>97.73</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>A detailed <inline-formula id="ieqn-68"><mml:math id="mml-ieqn-68"><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> analysis was conducted between SOSDL-ODC method and other existing models and the results are shown in <xref ref-type="fig" rid="fig-10">Fig. 10</xref>. The figure highlights that VGG16, LeNet, and ResNet50 models achieved the least <inline-formula id="ieqn-69"><mml:math id="mml-ieqn-69"><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> values such as 79.19%, 74.62%, and 77.98% respectively. On the other hand, AlexNet model attempted to achieve an increased <inline-formula id="ieqn-70"><mml:math id="mml-ieqn-70"><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> value of 81.30%. Though Sequential RNN, Non-regularized CNN, and Regularized CNN models accomplished enhanced <inline-formula id="ieqn-71"><mml:math id="mml-ieqn-71"><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> values such as 89.08%, 88.67%, and 90.53%, the presented SOSDL-ODC method gained the maximal <inline-formula id="ieqn-72"><mml:math id="mml-ieqn-72"><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> value of 96.42%.</p>
<fig id="fig-10">
<label>Figure 10</label>
<caption><title><inline-formula id="ieqn-73"><mml:math id="mml-ieqn-73"><mml:mi>P</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> analysis results of SOSDL-ODC technique and other recent methodologies</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-10.tif"/>
</fig>
<p>A brief <inline-formula id="ieqn-74"><mml:math id="mml-ieqn-74"><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> analysis was conducted between the proposed SOSDL-ODC method and other existing algorithms and the results are portrayed in <xref ref-type="fig" rid="fig-11">Fig. 11</xref>. The figure reveals that VGG16, LeNet, and ResNet50 methods achieved the least <inline-formula id="ieqn-75"><mml:math id="mml-ieqn-75"><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> values such as 74.48%, 67.59%, and 79.36% correspondingly. Also, AlexNet model attempted to demonstrate an enhanced <inline-formula id="ieqn-76"><mml:math id="mml-ieqn-76"><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> value of 75.18%. Eventually, Sequential RNN, Non-regularized CNN, and Regularized CNN techniques accomplished enhanced <inline-formula id="ieqn-77"><mml:math id="mml-ieqn-77"><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> values such as 90.49%, 92.55%, and 93.99%. However, the presented SOSDL-ODC method achieved the maximum <inline-formula id="ieqn-78"><mml:math id="mml-ieqn-78"><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> value of 96.24%.</p>
<fig id="fig-11">
<label>Figure 11</label>
<caption><title><inline-formula id="ieqn-79"><mml:math id="mml-ieqn-79"><mml:mi>R</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> analysis results of SOSDL-ODC technique and other recent methodologies</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-11.tif"/>
</fig>
<p>A detailed <inline-formula id="ieqn-80"><mml:math id="mml-ieqn-80"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> analysis was conducted between SOSDL-ODC method and other existing models and the results are shown in <xref ref-type="fig" rid="fig-12">Fig. 12</xref>. The figure infers that VGG16, LeNet, and ResNet50 algorithms achieved the least <inline-formula id="ieqn-81"><mml:math id="mml-ieqn-81"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> values such as 80.90%, 68.55%, and 76.52% correspondingly. Followed by, AlexNet model tried to achieve a superior <inline-formula id="ieqn-82"><mml:math id="mml-ieqn-82"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> value of 73.93%. Further, Sequential RNN, Non-regularized CNN, and Regularized CNN approaches accomplished high <inline-formula id="ieqn-83"><mml:math id="mml-ieqn-83"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> values such as 88.77%, 90.45%, and 91.52%. However, the projected SOSDL-ODC method gained the maximum <inline-formula id="ieqn-84"><mml:math id="mml-ieqn-84"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 97.73%. Based on the discussions made in the above-mentioned tables and figures, it is evident that the proposed SOSDL-ODC method is superior to other existing models.</p>
<fig id="fig-12">
<label>Figure 12</label>
<caption><title><inline-formula id="ieqn-85"><mml:math id="mml-ieqn-85"><mml:mi>A</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> analysis of SOSDL-ODC technique and other recent methodologies</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_31786-fig-12.tif"/>
</fig>
</sec>
<sec id="s4">
<label>4</label><title>Conclusion</title>
<p>In this work, a new SOSDL-ODC approach has been developed for recognition and classification of osteosarcoma with the help of histopathological images. The presented SOSDL-ODC approach initially applies pre-processing technique to enhance the quality of images. Then, MobileNetv2 model is applied to generate a suitable group of feature vectors and hyperparameter tuning of MobileNetv2 model is performed using SOS algorithm. Lastly, GRU technique is applied as a classification model to determine the suitable class labels. In order to validate the enhanced osteosarcoma classification performance of the proposed SOSDL-ODC technique, a comprehensive comparative study was conducted. The obtained outcomes established the supremacy of the proposed SOSDL-ODC algorithm over other existing approaches. In future, an ensemble of DL-based models can be developed to significantly boost the classification results of SOSDL-ODC model.</p>
</sec>
</body>
<back>
<sec><title>Funding Statement</title>
<p><funding-source>The Deanship of Scientific Research (DSR) at King Abdulaziz University (KAU), Jeddah, Saudi Arabia</funding-source> has funded this project, under grant no <award-id>KEP-1-120-42</award-id>.</p>
</sec>
<sec sec-type="COI-statement"><title>Conflicts of Interest</title>
<p>The authors declare that they have no conflicts of interest to report regarding the present study.</p>
</sec>
<ref-list content-type="authoryear"><title>References</title>
<ref id="ref-1"><label>[1]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>X.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>W.</given-names> <surname>Xu</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Ke</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Classification of osteosarcoma based on immunogenomic profiling</article-title>,&#x201D; <source>Frontiers in Cell and Developmental Biology</source>, vol. <volume>9</volume>, pp. <fpage>696878</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-2"><label>[2]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>D&#x2019;Acunto</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Martinelli</surname></string-name> and <string-name><given-names>D.</given-names> <surname>Moroni</surname></string-name></person-group>, &#x201C;<article-title>From human mesenchymal stromal cells to osteosarcoma cells classification by deep learning</article-title>,&#x201D; <source>Journal of Intelligent &#x0026; Fuzzy Systems</source>, vol. <volume>37</volume>, no. <issue>6</issue>, pp. <fpage>7199</fpage>&#x2013;<lpage>7206</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-3"><label>[3]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>G.</given-names> <surname>Djuri&#x010D;i&#x0107;</surname></string-name>, <string-name><given-names>J. S.</given-names> <surname>Vasiljevi&#x0107;</surname></string-name>, <string-name><given-names>D. J.</given-names> <surname>Risti&#x0107;</surname></string-name>, <string-name><given-names>R. Z.</given-names> <surname>Kova&#x010D;evi&#x0107;</surname></string-name>, <string-name><given-names>D. V.</given-names> <surname>Risti&#x0107;</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Prediction of chemotherapy response in primary osteosarcoma by use of the multifractal analysis of magnetic resonance images</article-title>,&#x201D; <source>Iranian Journal of Radiology</source>, vol. <volume>15</volume>, no. <issue>2</issue>, pp. <fpage>174</fpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-4"><label>[4]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>Z.</given-names> <surname>Du</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Yang</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Tang</surname></string-name> and <string-name><given-names>W.</given-names> <surname>Guo</surname></string-name></person-group>, &#x201C;<article-title>Lateral malleolus en bloc resection for the distal fibula osteosarcoma based on a new classification and proposed reconstruction choice: Analysis of 6 cases prognosis and literature review</article-title>,&#x201D; <source>Foot and Ankle Surgery</source>, vol. <volume>26</volume>, no. <issue>8</issue>, pp. <fpage>855</fpage>&#x2013;<lpage>863</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-5"><label>[5]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Zhong</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Si</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Geng</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Xing</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Hu</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Chondromyxoid fibroma-like osteosarcoma: A case series and literature review</article-title>,&#x201D; <source>BMC Musculoskeletal Disorders</source>, vol. <volume>21</volume>, no. <issue>1</issue>, pp. <fpage>53</fpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-6"><label>[6]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S. J.</given-names> <surname>Badashah</surname></string-name>, <string-name><given-names>S. S.</given-names> <surname>Basha</surname></string-name>, <string-name><given-names>S. R.</given-names> <surname>Ahamed</surname></string-name> and <string-name><given-names>S. P. V.</given-names> <surname>Subba Rao</surname></string-name></person-group>, &#x201C;<article-title>Fractional-harris hawks optimization-based generative adversarial network for osteosarcoma detection using renyi entropy-hybrid fusion</article-title>,&#x201D; <source>International Journal of Intelligent Systems</source>, vol. <volume>36</volume>, no. <issue>10</issue>, pp. <fpage>6007</fpage>&#x2013;<lpage>6031</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-7"><label>[7]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>H. B.</given-names> <surname>Arunachalam</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Mishra</surname></string-name>, <string-name><given-names>O.</given-names> <surname>Daescu</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Cederberg</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Rakheja</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Viable and necrotic tumor assessment from whole slide images of osteosarcoma using machine-learning and deep-learning models</article-title>,&#x201D; <source>PLOS ONE</source>, vol. <volume>14</volume>, no. <issue>4</issue>, pp. <fpage>e0210706</fpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-8"><label>[8]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Shankar</surname></string-name>, <string-name><given-names>E.</given-names> <surname>Perumal</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Elhoseny</surname></string-name>, <string-name><given-names>F.</given-names> <surname>Taher</surname></string-name>, <string-name><given-names>B. B.</given-names> <surname>Gupta</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Synergic deep learning for smart health diagnosis of COVID-19 for connected living and smart cities</article-title>,&#x201D; <source>ACM Transactions on Internet Technology</source>, vol. <volume>22</volume>, no. <issue>3</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>14</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-9"><label>[9]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Shankar</surname></string-name>, <string-name><given-names>E.</given-names> <surname>Perumal</surname></string-name>, <string-name><given-names>V.</given-names> <surname>Garc&#x00ED;aD&#x00ED;az</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Tiwari</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Gupta</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>An optimal cascaded recurrent neural network for intelligent COVID-19 detection using Chest X-ray images</article-title>,&#x201D; <source>Applied Soft Computing</source>, vol. <volume>113</volume>, no. <issue>8</issue>, pp. <fpage>107878</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-10"><label>[10]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>D. N.</given-names> <surname>Le</surname></string-name>, <string-name><given-names>V. S.</given-names> <surname>Parvathy</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Gupta</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Khanna</surname></string-name>, <string-name><given-names>J. J. P. C.</given-names> <surname>Rodrigues</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>IoT enabled depthwise separable convolution neural network with deep support vector machine for COVID-19 diagnosis and classification</article-title>,&#x201D; <source>International Journal of Machine Learning and Cybernetics</source>, vol. <volume>12</volume>, no. <issue>11</issue>, pp. <fpage>3235</fpage>&#x2013;<lpage>3248</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-11"><label>[11]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Zhong</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Hu</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Liu</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Automated prediction of the neoadjuvant chemotherapy response in osteosarcoma with deep learning and an MRI-based radiomics nomogram</article-title>,&#x201D; <source>European Radiology</source>, vol. <volume>36</volume>, no. <issue>2</issue>, pp. <fpage>188</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-12"><label>[12]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>P.</given-names> <surname>Bansal</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Gehlot</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Singhal</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Gupta</surname></string-name></person-group>, &#x201C;<article-title>Automatic detection of osteosarcoma based on integrated features and feature selection using binary arithmetic optimization algorithm</article-title>,&#x201D; <source>Multimedia Tools and Applications</source>, vol. <volume>81</volume>, no. <issue>6</issue>, pp. <fpage>8807</fpage>&#x2013;<lpage>8834</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-13"><label>[13]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>L.</given-names> <surname>Shuai</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Gao</surname></string-name> and <string-name><given-names>J.</given-names> <surname>Wang</surname></string-name></person-group>, &#x201C;<article-title>Wnet &#x002B;&#x002B;: A nested w-shaped network with multiscale input and adaptive deep supervision for osteosarcoma segmentation</article-title>,&#x201D; in <conf-name>2021 IEEE 4th Int. Conf. on Electronic Information and Communication Technology (ICEICT)</conf-name>, <publisher-loc>Xi&#x2019;an, China</publisher-loc>, pp. <fpage>93</fpage>&#x2013;<lpage>99</lpage>, <year>2021</year>. </mixed-citation></ref>
<ref id="ref-14"><label>[14]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>D. M.</given-names> <surname>Anisuzzaman</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Barzekar</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Tong</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Luo</surname></string-name> and <string-name><given-names>Z.</given-names> <surname>Yu</surname></string-name></person-group>, &#x201C;<article-title>A deep learning study on osteosarcoma detection from histological images</article-title>,&#x201D; <source>Biomedical Signal Processing and Control</source>, vol. <volume>69</volume>, no. <issue>5</issue>, pp. <fpage>102931</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-15"><label>[15]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>R.</given-names> <surname>Mishra</surname></string-name>, <string-name><given-names>O.</given-names> <surname>Daescu</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Leavey</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Rakheja</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Sengupta</surname></string-name></person-group>, &#x201C;<article-title>Convolutional neural network for histopathological analysis of osteosarcoma</article-title>,&#x201D; <source>Journal of Computational Biology</source>, vol. <volume>25</volume>, no. <issue>3</issue>, pp. <fpage>313</fpage>&#x2013;<lpage>325</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-16"><label>[16]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>Q.</given-names> <surname>Xiang</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Li</surname></string-name>, <string-name><given-names>G.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Lai</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Fruit image classification based on Mobilenetv2 with transfer learning technique</article-title>,&#x201D; in <conf-name>Proc. of the 3rd Int. Conf. on Computer Science and Application Engineering</conf-name>, <publisher-loc>New York, United States</publisher-loc>, pp. <fpage>1</fpage>&#x2013;<lpage>7</lpage>, <year>2019</year>. </mixed-citation></ref>
<ref id="ref-17"><label>[17]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N.</given-names> <surname>Metawa</surname></string-name>, <string-name><given-names>I. V.</given-names> <surname>Pustokhina</surname></string-name>, <string-name><given-names>D. A.</given-names> <surname>Pustokhin</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Shankar</surname></string-name> and <string-name><given-names>M.</given-names> <surname>Elhoseny</surname></string-name></person-group>, &#x201C;<article-title>Computational intelligence-based financial crisis prediction model using feature subset selection with optimal deep belief network</article-title>,&#x201D; <source>Big Data</source>, vol. <volume>9</volume>, no. <issue>2</issue>, pp. <fpage>100</fpage>&#x2013;<lpage>115</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-18"><label>[18]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Shankar</surname></string-name>, <string-name><given-names>S. N.</given-names> <surname>Mohanty</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Yadav</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Gopalakrishnan</surname></string-name> and <string-name><given-names>A. M.</given-names> <surname>Elmisery</surname></string-name></person-group>, &#x201C;<article-title>Automated COVID-19 diagnosis and classification using convolutional neural network with fusion based feature extraction model</article-title>,&#x201D; <source>Cognitive Neurodynamics</source>, vol. <volume>51</volume>, no. <issue>2</issue>, pp. <fpage>854</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-19"><label>[19]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>I. V.</given-names> <surname>Pustokhina</surname></string-name>, <string-name><given-names>D. A.</given-names> <surname>Pustokhin</surname></string-name>, <string-name><given-names>E. L.</given-names> <surname>Lydia</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Garg</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Kadian</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Hyperparameter search based convolution neural network with Bi&#x2010;LSTM model for intrusion detection system in multimedia big data environment</article-title>,&#x201D; <source>Multimedia Tools and Applications</source>, vol. <volume>13</volume>, no. <issue>5</issue>, pp. <fpage>111</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-20"><label>[20]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. E.</given-names> <surname>Ezugwu</surname></string-name> and <string-name><given-names>D.</given-names> <surname>Prayogo</surname></string-name></person-group>, &#x201C;<article-title>Symbiotic organisms search algorithm: Theory, recent advances and applications</article-title>,&#x201D; <source>Expert Systems with Applications</source>, vol. <volume>119</volume>, no. <issue>6</issue>, pp. <fpage>184</fpage>&#x2013;<lpage>209</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-21"><label>[21]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>C.</given-names> <surname>Xu</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Shen</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Du</surname></string-name> and <string-name><given-names>F.</given-names> <surname>Zhang</surname></string-name></person-group>, &#x201C;<article-title>An intrusion detection system using a deep neural network with gated recurrent units</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>6</volume>, pp. <fpage>48697</fpage>&#x2013;<lpage>48707</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-22"><label>[22]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>P.</given-names> <surname>Leavey</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Sengupta</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Rakheja</surname></string-name>, <string-name><given-names>O.</given-names> <surname>Daescu</surname></string-name>, <string-name><given-names>H. B.</given-names> <surname>Arunachalam</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Osteosarcoma data from ut southwestern/ut dallas for viable and necrotic tumor assessment [Data set]</article-title>,&#x201D; <source>The Cancer Imaging Archive</source>, vol. <volume>14</volume>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-23"><label>[23]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>R. A.</given-names> <surname>Nabid</surname></string-name>, <string-name><given-names>M. L.</given-names> <surname>Rahman</surname></string-name> and <string-name><given-names>M. F.</given-names> <surname>Hossain</surname></string-name></person-group>, &#x201C;<article-title>Classification of osteosarcoma tumor from histological image using sequential RCNN</article-title>,&#x201D; in <conf-name>2020 11th Int. Conf. on Electrical and Computer Engineering (ICECE)</conf-name>, <publisher-loc>Dhaka, Bangladesh</publisher-loc>, <year>2020</year>. </mixed-citation></ref>
<ref id="ref-24"><label>[24]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>I.</given-names> <surname>Ahmed</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Sardar</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Aljuaid</surname></string-name>, <string-name><given-names>F. A.</given-names> <surname>Khan</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Nawaz</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Convolutional neural network for histopathological osteosarcoma image classification</article-title>,&#x201D; <source>Computers, Materials &#x0026; Continua</source>, vol. <volume>69</volume>, no. <issue>3</issue>, pp. <fpage>3365</fpage>&#x2013;<lpage>3381</lpage>, <year>2021</year>.</mixed-citation></ref>
</ref-list>
</back>
</article>