<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "http://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xml:lang="en" article-type="research-article" dtd-version="1.1">
<front>
<journal-meta>
<journal-id journal-id-type="pmc">CSSE</journal-id>
<journal-id journal-id-type="nlm-ta">CSSE</journal-id>
<journal-id journal-id-type="publisher-id">CSSE</journal-id>
<journal-title-group>
<journal-title>Computer Systems Science &#x0026; Engineering</journal-title>
</journal-title-group>
<issn pub-type="ppub">0267-6192</issn>
<publisher>
<publisher-name>Tech Science Press</publisher-name>
<publisher-loc>USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">38322</article-id>
<article-id pub-id-type="doi">10.32604/csse.2023.038322</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Article</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Sand Cat Swarm Optimization with Deep Transfer Learning for Skin Cancer Classification</article-title>
<alt-title alt-title-type="left-running-head">Sand Cat Swarm Optimization with Deep Transfer Learning for Skin Cancer Classification</alt-title>
<alt-title alt-title-type="right-running-head">Sand Cat Swarm Optimization with Deep Transfer Learning for Skin Cancer Classification</alt-title>
</title-group>
<contrib-group>
<contrib id="author-1" contrib-type="author">
<name name-style="western"><surname>Anupama</surname><given-names>C. S. S.</given-names></name><xref ref-type="aff" rid="aff-1">1</xref></contrib>
<contrib id="author-2" contrib-type="author">
<name name-style="western"><surname>Yonbawi</surname><given-names>Saud</given-names></name><xref ref-type="aff" rid="aff-2">2</xref></contrib>
<contrib id="author-3" contrib-type="author">
<name name-style="western"><surname>Moses</surname><given-names>G. Jose</given-names></name><xref ref-type="aff" rid="aff-3">3</xref></contrib>
<contrib id="author-4" contrib-type="author">
<name name-style="western"><surname>Lydia</surname><given-names>E. Laxmi</given-names></name><xref ref-type="aff" rid="aff-4">4</xref></contrib>
<contrib id="author-5" contrib-type="author">
<name name-style="western"><surname>Kadry</surname><given-names>Seifedine</given-names></name><xref ref-type="aff" rid="aff-5">5</xref><xref ref-type="aff" rid="aff-6">6</xref><xref ref-type="aff" rid="aff-7">7</xref></contrib>
<contrib id="author-6" contrib-type="author" corresp="yes">
<name name-style="western"><surname>Kim</surname><given-names>Jungeun</given-names></name><xref ref-type="aff" rid="aff-8">8</xref><email>jekim@kongju.ac.kr</email></contrib>
<aff id="aff-1"><label>1</label><institution>Department of Electronics and Instrumentation Engineering, V. R. Siddhartha Engineering College</institution>, <addr-line>Vijayawada, 520007</addr-line>, <country>India</country></aff>
<aff id="aff-2"><label>2</label><institution>Department of Software Engineering, College of Computer Science and Engineering, University of Jeddah</institution>, <addr-line>Jeddah</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-3"><label>3</label><institution>Department of Computer Science and Engineering, University Institute of Engineering and Technology (UIET), Guru Nanak University</institution>, <addr-line>Hyderabad</addr-line>, <country>India</country></aff>
<aff id="aff-4"><label>4</label><institution>Department of Computer Science and Engineering, Vignan&#x2019;s Institute of Information Technology</institution>, <addr-line>Visakhapatnam, 530049</addr-line>, <country>India</country></aff>
<aff id="aff-5"><label>5</label><institution>Department of Applied Data Science, Noroff University College</institution>, <addr-line>Kristiansand</addr-line>, <country>Norway</country></aff>
<aff id="aff-6"><label>6</label><institution>Artificial Intelligence Research Center (AIRC), College of Engineering and Information Technology, Ajman University</institution>, <addr-line>Ajman</addr-line>, <country>United Arab Emirates</country></aff>
<aff id="aff-7"><label>7</label><institution>Department of Electrical and Computer Engineering, Lebanese American University</institution>, <addr-line>Byblos</addr-line>, <country>Lebanon</country></aff>
<aff id="aff-8"><label>8</label><institution>Department of Software, Kongju National University</institution>, <addr-line>Cheonan, 31080</addr-line>, <country>Korea</country></aff>
</contrib-group>
<author-notes>
<corresp id="cor1"><label>&#x002A;</label>Corresponding Author: Jungeun Kim. Email: <email>jekim@kongju.ac.kr</email></corresp>
</author-notes>
<pub-date date-type="collection" publication-format="electronic"><year>2023</year></pub-date>
<pub-date date-type="pub" publication-format="electronic"><day>28</day><month>7</month><year>2023</year></pub-date>
<volume>47</volume>
<issue>2</issue>
<fpage>2079</fpage>
<lpage>2095</lpage>
<history>
<date date-type="received">
<day>07</day><month>12</month><year>2022</year>
</date>
<date date-type="accepted">
<day>02</day><month>2</month><year>2023</year>
</date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2023 Anupama et al.</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Anupama et al.</copyright-holder>
<license xlink:href="https://creativecommons.org/licenses/by/4.0/">
<license-p>This work is licensed under a <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="TSP_CSSE_38322.pdf"></self-uri>
<abstract>
<p>Skin cancer is one of the most dangerous cancers. Because of the high melanoma death rate, skin cancer is divided into non-melanoma and melanoma. The dermatologist finds it difficult to identify skin cancer from dermoscopy images of skin lesions. Sometimes, pathology and biopsy examinations are required for cancer diagnosis. Earlier studies have formulated computer-based systems for detecting skin cancer from skin lesion images. With recent advancements in hardware and software technologies, deep learning (DL) has developed as a potential technique for feature learning. Therefore, this study develops a new sand cat swarm optimization with a deep transfer learning method for skin cancer detection and classification (SCSODTL-SCC) technique. The major intention of the SCSODTL-SCC model lies in the recognition and classification of different types of skin cancer on dermoscopic images. Primarily, Dull razor approach-related hair removal and median filtering-based noise elimination are performed. Moreover, the U2Net segmentation approach is employed for detecting infected lesion regions in dermoscopic images. Furthermore, the NASNetLarge-based feature extractor with a hybrid deep belief network (DBN) model is used for classification. Finally, the classification performance can be improved by the SCSO algorithm for the hyperparameter tuning process, showing the novelty of the work. The simulation values of the SCSODTL-SCC model are scrutinized on the benchmark skin lesion dataset. The comparative results assured that the SCSODTL-SCC model had shown maximum skin cancer classification performance in different measures.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>Deep learning</kwd>
<kwd>skin cancer</kwd>
<kwd>dermoscopic images</kwd>
<kwd>sand cat swarm optimization</kwd>
<kwd>machine learning</kwd>
</kwd-group>
<funding-group>
<award-group id="awg1">
<funding-source>Technology Development Program of MSS</funding-source>
<award-id>S3033853</award-id>
</award-group>
<award-group id="awg2">
<funding-source>National University Development Project by the Ministry of Education in 2022</funding-source>
</award-group>
</funding-group>
</article-meta>
</front>
<body>
<sec id="s1">
<label>1</label>
<title>Introduction</title>
<p>Skin cancer is a common type of tumour; if it is identified and treated early, it can mostly be cured. So effective techniques should be developed for the automatic classification of skin cancer [<xref ref-type="bibr" rid="ref-1">1</xref>]. As the largest part of the human body, the skin is responsible for protecting other human systems, which raises its vulnerability to disease. More skin cancers are identified every year than all other cancers in the US. Melanoma has a five-year survival rate of 99% if it does not spread to other organs [<xref ref-type="bibr" rid="ref-2">2</xref>]. If it spreads to other body parts, the survival rate decreases to 20%. But the early indications of skin cancer are not visible at all times, so diagnostic outcomes are reliant on the experience of a dermatologist [<xref ref-type="bibr" rid="ref-3">3</xref>]. For inexperienced doctors, an automated diagnosis mechanism will be a powerful tool for more precise diagnoses. Other than that, identifying skin cancer with the human eye is rarely generalizable. Hence, it is required to design an automatic classification technique for skin cancer that is quicker to diagnose, more precise, and less expensive [<xref ref-type="bibr" rid="ref-4">4</xref>]. In addition, applying this automatic diagnostic mechanism can effectually reduce skin cancer deaths.</p>
<p>Due to the diversity and complexity of skin disease images, the automatic classification of skin cancer remains challenging [<xref ref-type="bibr" rid="ref-5">5</xref>]. Initially, various skin lesions have many interclass similarities, which leads to misdiagnosis. For instance, there are several mimics of skin cancer in histopathological images, namely SCC and other diseases [<xref ref-type="bibr" rid="ref-6">6</xref>]. So, it is tough for the diagnosis mechanism to differentiate skin malignancies from their familiar imitators efficiently. Then, numerous skin lesions vary within similar classes in size, colour, structure, location, and feature [<xref ref-type="bibr" rid="ref-7">7</xref>]. For instance, the presence of skin cancer and its subclasses are different. This makes it hard to categorize different subclasses of the same classes. Also, the classification techniques are extremely sensitive to the types of cameras utilized for capturing images [<xref ref-type="bibr" rid="ref-8">8</xref>].</p>
<p>Machine Learning (ML) techniques are utilized for automating the diagnosis, leading to a framework and system in the medical sector that would help in assisting physicians in communicating, offering contextual relevance, enhancing clinical reliability, lowering medical costs, minimalizing errors relevant to human tiredness, more easily identifying diseases and lowering mortality rates [<xref ref-type="bibr" rid="ref-9">9</xref>]. An ML technique that can classify both benign and malignant pigmented skin lesions was a step toward reaching such objectives. In the presented work, ML methods and Convolutional Neural Networks (CNN) were utilized to precisely categorize pigmented skin lesions in dermoscopic imageries to find malignant skin lesions as soon as possible [<xref ref-type="bibr" rid="ref-10">10</xref>]. However, it remains a challenging problem. Because of the continual deepening of the model, the number of parameters of DL models also increases quickly, which results in model overfitting. At the same time, different hyperparameters significantly impact the efficiency of the CNN model, particularly the learning rate. It is also necessary to modify the learning rate parameter to obtain better performance.</p>
<p>This study develops a new sand cat swarm optimization with a deep transfer learning model for skin cancer detection and classification (SCSODTL-SCC) technique. The major intention of the SCSODTL-SCC model lies in the recognition and classification of different types of skin cancer on dermoscopic images. Primarily, Dull razor method-based hair removal and median filtering-based noise elimination are performed. Moreover, the U2Net segmentation approach is employed to detect infected lesion regions in dermoscopic images. Furthermore, the NASNetLarge-based feature extractor with a hybrid deep belief network (DBN) model is used for classification. Finally, the classification performance can be improved by the SCSO algorithm for the hyperparameter tuning process. The simulation values of the SCSODTL-SCC model are inspected on the benchmark ISIC dataset.</p>
</sec>
<sec id="s2">
<label>2</label>
<title>Related Works</title>
<p>Reshma et al. [<xref ref-type="bibr" rid="ref-11">11</xref>] introduced multilevel thresholding with DL (IMLT-DL) related skin lesion classification and segmentation methods utilizing dermoscopy images to overcome such difficulties. At first, the IMLT-DL method integrates the inpainting and Top hat filtering approach to pre-process the dermoscopic imageries. Likewise, Mayfly Optimization (MFO) plus multilevel Kapur&#x2019;s thresholding-oriented segmenting procedure is included in determining affected areas. In addition, to extract a useful set of feature vectors, an Inception v3-oriented feature extractor is enforced. At last, the classification can be accomplished through a gradient boosting tree (GBT) technique. In [<xref ref-type="bibr" rid="ref-12">12</xref>], a mechanism was presented for identifying melanoma mechanically with the help of an ensemble method, image texture feature extraction, and CNN. For image classification in the CNN stage, 2 CNN models, the VGG-19 and a proposed network, are used. Beyond that, texture features were derived, and their dimension will be minimized through kernel PCA (kPCA) to scale up the productivity of classification in extracting feature stage.</p>
<p>In [<xref ref-type="bibr" rid="ref-13">13</xref>], the author designed an ensemble of CNNs for a multiclass classification study related to incorporating image preprocessing, risk management, and DL on skin cancer dermoscopic imageries with Meta-data. The author fundamentally exemplified general stacking ensemble flow from Meta-data concatenating, data preprocessing phases, first CNNs ensemble and second meta-classifiers ensemble and CNN&#x2019;s grouping. Shorfuzzaman [<xref ref-type="bibr" rid="ref-14">14</xref>] devises a CNN-oriented stacked ensemble structure for diagnosing skin cancer at the initial level. In stacking ensemble structure, the TL was leveraged in which many CNNs sub-methods that execute the similar classifier task were re-accumulated. A new technique named a meta-learner used all sub-model predictions and generated final predictive outcomes. Kaur et al. [<xref ref-type="bibr" rid="ref-15">15</xref>] suggest an automatic technique related to a DCNN to classify benign and malignant melanoma precisely. The DCNN structure is devised prudently by forming multiple layers that are liable for the extraction of high and low-level attributes of skin imageries in a unique way.</p>
<p>Chaturvedi et al. [<xref ref-type="bibr" rid="ref-16">16</xref>] modelled an automatic CAD mechanism for a multi-class skin (MCS) cancer classification with maximum precision. The presented technique outpaced modern DL techniques and expert dermatologists for MCS cancer classification. The author executed optimal tuning on 7 classes of HAM10000 data and performed a brief analysis to examine the efficiency of 4 ensemble models and 5 pre-trained CNNs. The authors [<xref ref-type="bibr" rid="ref-17">17</xref>] developed a DCNN-related method to classify skin cancer types automatically into non-melanoma and melanoma with more precision. To solve such 2 issues and attain high classification efficiency, the author leveraged EfficientNet architecture related to TL approaches, which learned fine-grained and more complex paradigms from lesion images by mechanically scaling the resolution, depth, and width of the network.</p>
</sec>
<sec id="s3">
<label>3</label>
<title>The Proposed Model</title>
<p>In this study, an automatic skin cancer classification utilizing the SCSODTL-SCC technique has been developed on dermoscopic images. The SCSODTL-SCC model&#x0027;s goal is to detect and classify different kinds of skin cancer on dermoscopic images. In the presented SCSODTL-SCC model, various subprocesses are involved, namely preprocessing, U2Net segmentation, NASNetLarge feature extraction, hybrid DBN classification, and SCSO hyperparameter tuning. <xref ref-type="fig" rid="fig-1">Fig. 1</xref> represents the overall process of the SCSODTL-SCC approach.</p>
<fig id="fig-1">
<label>Figure 1</label>
<caption>
<title>Working process of the SCSODTL-SCC system</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CSSE_38322-fig-1.tif"/>
</fig>
<sec id="s3_1">
<label>3.1</label>
<title>Image Preprocessing</title>
<p>Firstly, Dull razor approach-related hair removal and median filtering-based noise elimination are performed. In dermoscopic images, hairs are removed using the DullRazor technique for accurate detection and segmentation. This method makes hair removal on lesions in three phases. In the primary stage, it identifies hair positions through a grayscale morphological closing function. In the subsequent stage, it determines hair positions by monitoring the thickness and length of identified shapes. Later, the ensured pixels will be interchanged using a bilinear interpolation method. Finally, it smoothens the exchanged pixels utilizing a median filter. Moreover, the U2Net segmentation approach is employed to detect infected lesion regions in dermoscopic images. U2-Net can be referred to as an advanced DL method for background removal, which generates the mask that can be further leveraged for segmenting the image through image processing functionalities of Pillow and OpenCV libraries. The image with background is given to the U2-Net model, which produced a mask for the image. The mask can extract the RoI from the original image without including the background.</p>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Feature Extraction Using NASNet Model</title>
<p>At this stage, the NASNetLarge-based feature extractor is applied. Neural architecture search (NAS) is one of the authors&#x2019; essential search processes during this network. The child network was revealed for accomplishing any accuracy on the validation set that notably existed for convergence [<xref ref-type="bibr" rid="ref-18">18</xref>]. The succeeding accuracy value has been employed for upgrading the controller that successively creates optimum structure over time. This procedure gradient occurs for updating the controller weighted. During this method, the entire convolutional net infrastructure existed manually. It can be collected of convolutional cells which take related shapes as a new but are weight in a distinct approach. 2 kinds of convolutional cells are occurring for quickly evolving scalable structures for images of several sizes: (i) convolutional cells produce feature maps having a 2-fold decrease in height and width, and (ii) convolutional cells return feature map with same dimensional.</p>
<p>These 2 types of convolutional cells can be demonstrated as reduction and normal cells correspondingly. An initial procedure utilized for the cell input provides a 2-step stride for minimalizing the cell height and width. The convolutional cell assists striding as it can assume all the operations. The normal and reduction cells infrastructure the controller RNN is searching for is different from the convolutional net. The search area was employed to search all the cell shapes. There were two hidden states (HS) such as <inline-formula id="ieqn-1"><mml:math id="mml-ieqn-1"><mml:mi>h</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>i</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> and <inline-formula id="ieqn-2"><mml:math id="mml-ieqn-2"><mml:mi>h</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>i</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula>, obtainable to all the cells from the search space. The controller RNN creates recursive predictive on the residual convolutional cells on the fundamental of 2 initial HSs. The controller predictive to every cell was orderly as to B blocks, with all the blocks containing 5 predictive stages executed by 5 separate SoftMax classifiers signifying various selective block elements.</p>
<p>Step1. Select HS from <inline-formula id="ieqn-3"><mml:math id="mml-ieqn-3"><mml:mi>h</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>i</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula>, <inline-formula id="ieqn-4"><mml:math id="mml-ieqn-4"><mml:mi>h</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>i</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula>, or from the set of previously created HSs.</p>
<p>Step2. During the same selection as in Step 1, select the next HS.</p>
<p>Step3. Select an operation to apply to the HS chosen in Step 1.</p>
<p>Step4. Select an operation to apply to the HS chosen in Step 2.</p>
<p>Step5. Select a method by which the outcomes of Steps 3 &#x0026; 4 are combined to generate a new HS. The newly created HS can then be applied as an input in the ensuing block.</p>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Skin Cancer Classification Using Optimal DBN Model</title>
<p>At this stage, the DBN model analyses the feature vectors for the classification process. The DBN technique includes four layers of pre-trained RBM and resultant layers (Softmax Regression) [<xref ref-type="bibr" rid="ref-19">19</xref>]. The parameter can be estimated by the trained method previously employing DBN for classifying and representing assaults. The training of DBN is categorized as pre-train for presenting and fine-tuning to classifiers. At the same time, the resulting DBN is transmitted to inputs of Softmax Regression and comprised in DBN, which includes stacked RBM. Primarily, the DBN is trained to reconstruct the unlabelled training data and is therefore executed in an unsupervised manner. Edata[.] and Emodel[.] denote expectations under the data and model distributions, respectively.</p>
<p><disp-formula id="eqn-1">
<label>(1)</label>
<mml:math id="mml-eqn-1" display="block"><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:mi>log</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mi>P</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mfrac><mml:mo>=</mml:mo><mml:msub><mml:mi>E</mml:mi><mml:mrow><mml:mi>d</mml:mi><mml:mi>a</mml:mi><mml:mi>t</mml:mi><mml:mi>a</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">[</mml:mo><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">]</mml:mo><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>E</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">m</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">d</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">l</mml:mi></mml:mrow></mml:mrow></mml:msub><mml:mo stretchy="false">[</mml:mo><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">]</mml:mo></mml:math></disp-formula></p>
<p><disp-formula id="eqn-2">
<label>(2)</label>
<mml:math id="mml-eqn-2" display="block"><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:mi>log</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mi>P</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mfrac><mml:mo>=</mml:mo><mml:msub><mml:mi>E</mml:mi><mml:mrow><mml:mi>d</mml:mi><mml:mi>a</mml:mi><mml:mi>t</mml:mi><mml:mi>a</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">[</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">]</mml:mo><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>E</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">m</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">d</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">l</mml:mi></mml:mrow></mml:mrow></mml:msub><mml:mo stretchy="false">[</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">]</mml:mo></mml:math></disp-formula></p>
<p><disp-formula id="eqn-3">
<label>(3)</label>
<mml:math id="mml-eqn-3" display="block"><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:mi>log</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mi>P</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:msub><mml:mi>b</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mfrac><mml:mo>=</mml:mo><mml:msub><mml:mi>E</mml:mi><mml:mrow><mml:mi>d</mml:mi><mml:mi>a</mml:mi><mml:mi>t</mml:mi><mml:mi>a</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">[</mml:mo><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">]</mml:mo><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>E</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">m</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">d</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">l</mml:mi></mml:mrow></mml:mrow></mml:msub><mml:mo stretchy="false">[</mml:mo><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">]</mml:mo></mml:math></disp-formula></p>
<p>During this work, the 3 approaches (<xref ref-type="disp-formula" rid="eqn-1">Eqs. (1)</xref>&#x2013;<xref ref-type="disp-formula" rid="eqn-3">(3)</xref>) primarily comprised in typical DBN network; the second term cannot be reached directly. This is because it is a probability distribution that is estimated utilizing the DBN. Gibb&#x2019;s sampling was utilized to calculate this probability. But, this approach is time-consuming and is difficult to utilize in real-time. The contrastive divergence (CD) process was utilized to determine optimum solutions, a fast-learning technique. Primarily, a trained sample has been employed to finish the Markov chain&#x2019;s beginning. Afterwards, instances are obtained after <inline-formula id="ieqn-5"><mml:math id="mml-ieqn-5"><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mspace width="thinmathspace" /><mml:mi>k</mml:mi></mml:math></inline-formula> steps of Gibb&#x2019;s sampling. This approach is termed CD-k. Notably, the efficiency of CD is adequate if <inline-formula id="ieqn-6"><mml:math id="mml-ieqn-6"><mml:mi>k</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:math></inline-formula>.</p>
<p>During this work, for training, stacked RBM layer-wise to generate DBN, <inline-formula id="ieqn-7"><mml:math id="mml-ieqn-7"><mml:mi>b</mml:mi></mml:math></inline-formula>, and <inline-formula id="ieqn-8"><mml:math id="mml-ieqn-8"><mml:mi>W</mml:mi></mml:math></inline-formula> parameters can be upgraded dependent upon CD-1.</p>
<p><disp-formula id="eqn-4">
<label>(4)</label>
<mml:math id="mml-eqn-4" display="block"><mml:msup><mml:mi>W</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup><mml:mo>=</mml:mo><mml:msup><mml:mi>W</mml:mi><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msup><mml:mo>+</mml:mo><mml:mi>&#x03B5;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>P</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mfrac><mml:mi>h</mml:mi><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>0</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup></mml:mfrac><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">[</mml:mo><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>0</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup><mml:msup><mml:mo stretchy="false">]</mml:mo><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msup><mml:mo>&#x2212;</mml:mo><mml:mi>P</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mfrac><mml:mi>h</mml:mi><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup></mml:mfrac><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">[</mml:mo><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup><mml:msup><mml:mo 
stretchy="false">]</mml:mo><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msup><mml:mo>)</mml:mo></mml:mrow><mml:msup><mml:mi>a</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup><mml:mo>=</mml:mo><mml:msup><mml:mi>a</mml:mi><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msup><mml:mo>+</mml:mo><mml:mi>&#x03B5;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>0</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup><mml:mo>&#x2212;</mml:mo><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p><disp-formula id="ueqn-5">
<mml:math id="mml-ueqn-5" display="block"><mml:msup><mml:mi>b</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup><mml:mo>=</mml:mo><mml:msup><mml:mi>b</mml:mi><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msup><mml:mo>+</mml:mo><mml:mi>&#x03B5;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>P</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mfrac><mml:mi>h</mml:mi><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>0</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup></mml:mfrac><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>P</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mfrac><mml:mi>h</mml:mi><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup></mml:mfrac><mml:mo>)</mml:mo></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>In this formula, <inline-formula id="ieqn-9"><mml:math id="mml-ieqn-9"><mml:mi>&#x03B5;</mml:mi></mml:math></inline-formula> represents the rate of learning, and <inline-formula id="ieqn-10"><mml:math id="mml-ieqn-10"><mml:mi>t</mml:mi></mml:math></inline-formula> stands for the time steps. The visible variables are represented as <inline-formula id="ieqn-11"><mml:math id="mml-ieqn-11"><mml:mi>x</mml:mi><mml:mo>=</mml:mo><mml:mo fence="false" stretchy="false">{</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo fence="false" stretchy="false">}</mml:mo></mml:math></inline-formula> and the hidden variables as <inline-formula id="ieqn-12"><mml:math id="mml-ieqn-12"><mml:mi>h</mml:mi><mml:mo>=</mml:mo><mml:mo fence="false" stretchy="false">{</mml:mo><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub><mml:mo fence="false" stretchy="false">}</mml:mo></mml:math></inline-formula>. At this point, <inline-formula id="ieqn-13"><mml:math id="mml-ieqn-13"><mml:mi>M</mml:mi></mml:math></inline-formula> and <inline-formula id="ieqn-14"><mml:math id="mml-ieqn-14"><mml:mi>N</mml:mi></mml:math></inline-formula> nodes were from visible and hidden layers. The weighted feature vectors can be introduced arbitrarily in the network by sampling from the CD technique.</p>
<p>The stages for implementing greedy layer-wise trained processes to every layer of DBN are provided. <xref ref-type="fig" rid="fig-2">Fig. 2</xref> showcases the framework of DBN. A primary RBM trained, the data appropriate to <inline-formula id="ieqn-15"><mml:math id="mml-ieqn-15"><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mspace width="thinmathspace" /><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:math></inline-formula> parameter was assumed as input <inline-formula id="ieqn-16"><mml:math id="mml-ieqn-16"><mml:mi>x</mml:mi><mml:mo>.</mml:mo><mml:mspace width="thinmathspace" /><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:math></inline-formula> is frozen and executed to train RBM <inline-formula id="ieqn-17"><mml:math id="mml-ieqn-17"><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:math></inline-formula> as <inline-formula id="ieqn-18"><mml:math id="mml-ieqn-18"><mml:mi>Q</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mi>v</mml:mi></mml:mfrac></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>P</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mfrac></mml:mstyle><mml:mo>,</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> to train RBM and the subsequent binary feature layer. 
<inline-formula id="ieqn-19"><mml:math id="mml-ieqn-19"><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub></mml:math></inline-formula>, which defines 2-layer features frozen, and the database crucial to train the binary feature at 3 layers can be obtained in <inline-formula id="ieqn-20"><mml:math id="mml-ieqn-20"><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub></mml:math></inline-formula> as <inline-formula id="ieqn-21"><mml:math id="mml-ieqn-21"><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mfrac></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>P</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mfrac></mml:mstyle><mml:mo>,</mml:mo><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula>. This process always repeats across all the layers. LR was employed in the typical binary classifier. However, this study chose Softmax as there occur several classifiers in DBN.</p>
<fig id="fig-2">
<label>Figure 2</label>
<caption>
<title>Architecture of DBN</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CSSE_38322-fig-2.tif"/>
</fig>
<p>As the trained set <inline-formula id="ieqn-22"><mml:math id="mml-ieqn-22"><mml:mo fence="false" stretchy="false">{</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:msubsup><mml:mi>z</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msubsup><mml:mo>,</mml:mo><mml:msub><mml:mi>y</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo><mml:mo>,</mml:mo></mml:math></inline-formula> <inline-formula id="ieqn-23"><mml:math id="mml-ieqn-23"><mml:mo stretchy="false">(</mml:mo><mml:msubsup><mml:mi>z</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>2</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msubsup><mml:mo>,</mml:mo><mml:msub><mml:mi>y</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>2</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula>, <inline-formula id="ieqn-24"><mml:math id="mml-ieqn-24"><mml:mo stretchy="false">(</mml:mo><mml:msubsup><mml:mi>z</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>m</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msubsup><mml:mo>,</mml:mo><mml:msub><mml:mi>y</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>m</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo><mml:mo fence="false" stretchy="false">}</mml:mo><mml:mo>,</mml:mo></mml:math></inline-formula> <inline-formula id="ieqn-25"><mml:math id="mml-ieqn-25"><mml:mi>m</mml:mi></mml:math></inline-formula> signifies the count of instances from the trained set and <inline-formula id="ieqn-26"><mml:math id="mml-ieqn-26"><mml:mo 
stretchy="false">(</mml:mo><mml:msubsup><mml:mi>z</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msubsup><mml:mo>,</mml:mo><mml:msubsup><mml:mi>z</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>m</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msubsup><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> represents hidden vectors of top RBM. The subsequent formula estimated the Softmax function &#x03D5; in resultant layers, <inline-formula id="ieqn-27"><mml:math id="mml-ieqn-27"><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mo>,</mml:mo></mml:math></inline-formula> <inline-formula id="ieqn-28"><mml:math id="mml-ieqn-28"><mml:mi>k</mml:mi></mml:math></inline-formula> to every class, the conditional probability of <inline-formula id="ieqn-29"><mml:math id="mml-ieqn-29"><mml:mi>P</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>y</mml:mi><mml:mo>=</mml:mo><mml:mi>j</mml:mi><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:msup><mml:mi>z</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula>.</p>
<p><disp-formula id="eqn-5">
<label>(5)</label>
<mml:math id="mml-eqn-5" display="block"><mml:mi>P</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>y</mml:mi><mml:mo>=</mml:mo><mml:mi>j</mml:mi><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:msup><mml:mi>z</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:msub><mml:mi>&#x03D5;</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>o</mml:mi><mml:mrow><mml:mi mathvariant="italic">f</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">m</mml:mi><mml:mi mathvariant="italic">a</mml:mi><mml:mi mathvariant="italic">x</mml:mi></mml:mrow></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:msup><mml:mi>z</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mfrac><mml:msup><mml:mi>e</mml:mi><mml:mrow><mml:mrow><mml:msup><mml:mi>z</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup></mml:mrow></mml:mrow></mml:msup><mml:mrow><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:munderover><mml:msup><mml:mi>e</mml:mi><mml:mrow><mml:msubsup><mml:mi>z</mml:mi><mml:mrow><mml:mi>k</mml:mi></mml:mrow><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msubsup></mml:mrow></mml:msup></mml:mrow></mml:mfrac></mml:math></disp-formula></p>
<p>In <xref ref-type="disp-formula" rid="eqn-5">Eq. (5)</xref>, <inline-formula id="ieqn-30"><mml:math id="mml-ieqn-30"><mml:msub><mml:mi>z</mml:mi><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msub><mml:mo>&#x2208;</mml:mo><mml:msup><mml:mi>R</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup></mml:math></inline-formula> represents the top-most hidden vector, and it can be expressed as follows:</p>
<p><disp-formula id="eqn-6">
<label>(6)</label>
<mml:math id="mml-eqn-6" display="block"><mml:mi>z</mml:mi><mml:mo>=</mml:mo><mml:msub><mml:mi>w</mml:mi><mml:mrow><mml:mn>0</mml:mn></mml:mrow></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mn>0</mml:mn></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>w</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mo>+</mml:mo><mml:msub><mml:mi>w</mml:mi><mml:mrow><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>l</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>m</mml:mi></mml:mrow></mml:msubsup><mml:msub><mml:mi>w</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>l</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msup><mml:mi>w</mml:mi><mml:mrow><mml:mrow><mml:msub><mml:mi>T</mml:mi><mml:mrow><mml:mi>X</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow></mml:msup></mml:math></disp-formula></p>
<p>Finally, the classification performance can be improved by the SCSO algorithm for the hyperparameter tuning process.</p>
<p>The SCSO algorithm was inspired by the foraging behaviours of sand cats in the desert [<xref ref-type="bibr" rid="ref-20">20</xref>]. They are capable of detecting lower-frequency noise to find prey either below or above ground. The model considers optimum values in the exploration stage as prey, and the search process always explores the search space via position updates, finally moving nearer to the region where the optimum value is positioned. The SCSO model was designed with the prey attack and prey search mechanisms. The prey search system simulates the behaviour of sand cats&#x0027; search for prey.</p>
<p><disp-formula id="eqn-7">
<label>(7)</label>
<mml:math id="mml-eqn-7" display="block"><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>r</mml:mi><mml:mo>&#x22C5;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x22C5;</mml:mo><mml:mrow><mml:mover><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>In <xref ref-type="disp-formula" rid="eqn-7">Eq. (7)</xref>, <inline-formula id="ieqn-31"><mml:math id="mml-ieqn-31"><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow></mml:math></inline-formula> and <inline-formula id="ieqn-32"><mml:math id="mml-ieqn-32"><mml:msub><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> are the location vector and current place of the search agent, <inline-formula id="ieqn-33"><mml:math id="mml-ieqn-33"><mml:mi>t</mml:mi></mml:math></inline-formula> is the present iteration, <inline-formula id="ieqn-34"><mml:math id="mml-ieqn-34"><mml:msub><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> is the better position of the candidate, and <inline-formula id="ieqn-35"><mml:math id="mml-ieqn-35"><mml:mi>r</mml:mi></mml:math></inline-formula> is a range of sensitivity of sand cats to lower frequency noise, that is formulated below:</p>
<p><disp-formula id="eqn-8">
<label>(8)</label>
<mml:math id="mml-eqn-8" display="block"><mml:mi>r</mml:mi><mml:mo>=</mml:mo><mml:msub><mml:mrow><mml:mover><mml:mi>r</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>G</mml:mi></mml:mrow></mml:msub><mml:mo>&#x00D7;</mml:mo><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>In <xref ref-type="disp-formula" rid="eqn-8">Eq. (8)</xref>, <inline-formula id="ieqn-36"><mml:math id="mml-ieqn-36"><mml:msub><mml:mrow><mml:mover><mml:mi>r</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> is the sensitivity range that linearly declines from 2 to <inline-formula id="ieqn-37"><mml:math id="mml-ieqn-37"><mml:mn>0</mml:mn></mml:math></inline-formula>, as follows:</p>
<p><disp-formula id="eqn-9">
<label>(9)</label>
<mml:math id="mml-eqn-9" display="block"><mml:mrow><mml:mover><mml:mi>r</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mo>=</mml:mo><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>M</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mfrac><mml:mrow><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>M</mml:mi></mml:mrow></mml:msub><mml:mo>&#x00D7;</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mo movablelimits="true" form="prefix">max</mml:mo></mml:mrow></mml:msub></mml:mrow></mml:mfrac><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>In <xref ref-type="disp-formula" rid="eqn-9">Eq. (9)</xref>, <inline-formula id="ieqn-38"><mml:math id="mml-ieqn-38"><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> represents the present iteration, and <inline-formula id="ieqn-39"><mml:math id="mml-ieqn-39"><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mo movablelimits="true" form="prefix">max</mml:mo></mml:mrow></mml:msub></mml:math></inline-formula> means the maximal iteration. Furthermore, the sand cat senses a low frequency of <inline-formula id="ieqn-40"><mml:math id="mml-ieqn-40"><mml:mn>2</mml:mn><mml:mspace width="thinmathspace" /><mml:mtext>kHz</mml:mtext></mml:math></inline-formula>; the value of <inline-formula id="ieqn-41"><mml:math id="mml-ieqn-41"><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>M</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> is 2.</p>
<p>The SCSO technique attacks prey after prey search, and the prey attack system for sand cats can be defined using <xref ref-type="disp-formula" rid="eqn-10">Eqs. (10)</xref> and <xref ref-type="disp-formula" rid="eqn-11">(11)</xref>:</p>
<p><disp-formula id="eqn-10">
<label>(10)</label>
<mml:math id="mml-eqn-10" display="block"><mml:msubsup><mml:mi>x</mml:mi><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:mo>|</mml:mo><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x22C5;</mml:mo><mml:msub><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>|</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p><disp-formula id="eqn-11">
<label>(11)</label>
<mml:math id="mml-eqn-11" display="block"><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:msub><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mover><mml:mi>r</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mo>&#x22C5;</mml:mo><mml:msubsup><mml:mi>x</mml:mi><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mrow></mml:msubsup><mml:mo>&#x22C5;</mml:mo><mml:mi>cos</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi>&#x03B8;</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula></p><p>where <inline-formula id="ieqn-42"><mml:math id="mml-ieqn-42"><mml:mi>&#x03B8;</mml:mi></mml:math></inline-formula> indicates a random angle between <inline-formula id="ieqn-43"><mml:math id="mml-ieqn-43"><mml:mn>0</mml:mn></mml:math></inline-formula> to 360, hence <inline-formula id="ieqn-44"><mml:math id="mml-ieqn-44"><mml:mi>cos</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>&#x03B8;</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> takes values within [&#x2212;1, 1]. <inline-formula id="ieqn-45"><mml:math id="mml-ieqn-45"><mml:msubsup><mml:mi>x</mml:mi><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mrow></mml:msubsup></mml:math></inline-formula> indicates the random location produced by the better and the existing location. 
Using this algorithm, every member of the population is capable of moving in different directions. Every sand cat selects a random angle. Thus, the sand cat avoids local optima traps while coming close to the prey location. The random angle in <xref ref-type="disp-formula" rid="eqn-11">Eq. (11)</xref> allows for an effective hunting and search direction of the agent.</p>
<p>EXPLORATION AND EXPLOITATION</p>
<p>SCSO balances the exploitation and exploration stages via the adaptive factor <inline-formula id="ieqn-46"><mml:math id="mml-ieqn-46"><mml:mrow><mml:mover><mml:mi>R</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow></mml:math></inline-formula> that is formulated by:</p>
<p><disp-formula id="eqn-12">
<label>(12)</label>
<mml:math id="mml-eqn-12" display="block"><mml:mi>R</mml:mi><mml:mo>=</mml:mo><mml:mn>2</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:msub><mml:mrow><mml:mover><mml:mi>r</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>G</mml:mi></mml:mrow></mml:msub><mml:mo>&#x00D7;</mml:mo><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mrow><mml:mover><mml:mi>r</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:math></disp-formula></p>
<p>As the count of iterations increases, <inline-formula id="ieqn-47"><mml:math id="mml-ieqn-47"><mml:msub><mml:mrow><mml:mover><mml:mi>r</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> linearly declines from 2 to <inline-formula id="ieqn-48"><mml:math id="mml-ieqn-48"><mml:mn>0</mml:mn></mml:math></inline-formula>. The position of every sand cat during the exploitation and exploration stage can be described as follows:</p>
<p><disp-formula id="eqn-13">
<label>(13)</label>
<mml:math id="mml-eqn-13" display="block"><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mtable columnalign="left left" rowspacing=".2em" columnspacing="1em" displaystyle="false"><mml:mtr><mml:mtd><mml:mi>r</mml:mi><mml:mo>&#x22C5;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x22C5;</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mtd><mml:mtd><mml:mrow><mml:mo>|</mml:mo><mml:mi>R</mml:mi><mml:mo>|</mml:mo></mml:mrow><mml:mo>&#x003E;</mml:mo><mml:mn>1</mml:mn></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mover><mml:mi>X</mml:mi><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>r</mml:mi><mml:mo>&#x22C5;</mml:mo><mml:msubsup><mml:mi>x</mml:mi><mml:mrow><mml:mi>r</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">&#x2192;</mml:mo></mml:mrow></mml:msubsup><mml:mo>&#x22C5;</mml:mo><mml:mi>cos</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi>&#x03B8;</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mi>R</mml:mi><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mo>&#x2264;</mml:mo><mml:mn>1</mml:mn></mml:mtd></mml:mtr></mml:mtable><mml:mo fence="true" stretchy="true" symmetric="true"></mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>In <xref ref-type="disp-formula" rid="eqn-13">Eq. (13)</xref>, the searching agent of SCSO attacks targeted prey if <inline-formula id="ieqn-49"><mml:math id="mml-ieqn-49"><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mi>R</mml:mi><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mo>&#x2264;</mml:mo><mml:mn>1</mml:mn></mml:math></inline-formula>, or else the searching agent globally searches for a potential solution. Every sand cat has a different search radius in the exploration stage, preventing the algorithm from getting trapped in the local optima solution.</p>
<p>The SCSO has the subsequent features: 1) every population member moves in a distinct circular direction, which safeguards that the mechanism has the possibility of getting closer to the prey and achieving an improved convergence rate. 2) SCSO features a simple framework with fewer parameters that are easily implemented. 3) SCSO considers the position of the optimum solution as prey. The mechanism avoids getting trapped in search stagnation by following the angle. 4) SCSO is capable of balancing exploration and exploitation stages for improving the convergence speed. 5) SCSO retains the global optimum solution in the iteration method, and the prey location isn&#x0027;t affected by reduced population quality.</p>
<p>The SCSO approach derives a fitness function (FF) to attain a better classifier outcome. It assigns positive values to signify the superior outcome of the candidate solutions. In this study, the minimization of the classifier error rate is treated as the FF, as seen in <xref ref-type="disp-formula" rid="eqn-14">Eq. (14)</xref>.</p>
<p><disp-formula id="ueqn-15">
<mml:math id="mml-ueqn-15" display="block"><mml:mrow><mml:mi mathvariant="italic">f</mml:mi><mml:mi mathvariant="italic">i</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">n</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">s</mml:mi></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mi mathvariant="italic">C</mml:mi><mml:mi mathvariant="italic">l</mml:mi><mml:mi mathvariant="italic">a</mml:mi><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">i</mml:mi><mml:mi mathvariant="italic">f</mml:mi><mml:mi mathvariant="italic">i</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mspace width="thinmathspace" /><mml:mi mathvariant="italic">E</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mspace width="thinmathspace" /><mml:mi mathvariant="italic">R</mml:mi><mml:mi mathvariant="italic">a</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">e</mml:mi></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p><disp-formula id="eqn-14">
<label>(14)</label>
<mml:math id="mml-eqn-14" display="block"><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mrow><mml:mi mathvariant="italic">n</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">m</mml:mi><mml:mi mathvariant="italic">b</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">r</mml:mi></mml:mrow><mml:mspace width="thinmathspace" /><mml:mi>o</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thinmathspace" /><mml:mrow><mml:mi mathvariant="italic">m</mml:mi><mml:mi mathvariant="italic">i</mml:mi><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">l</mml:mi><mml:mi mathvariant="italic">a</mml:mi><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">i</mml:mi><mml:mi mathvariant="italic">f</mml:mi><mml:mi mathvariant="italic">i</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">d</mml:mi></mml:mrow><mml:mspace width="thinmathspace" /><mml:mrow><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">a</mml:mi><mml:mi mathvariant="italic">m</mml:mi><mml:mi mathvariant="italic">p</mml:mi><mml:mi mathvariant="italic">l</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">s</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mrow><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">t</mml:mi><mml:mi mathvariant="italic">a</mml:mi><mml:mi mathvariant="italic">l</mml:mi></mml:mrow><mml:mspace width="thinmathspace" /><mml:mrow><mml:mi mathvariant="italic">n</mml:mi><mml:mi mathvariant="italic">u</mml:mi><mml:mi mathvariant="italic">m</mml:mi><mml:mi mathvariant="italic">b</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">r</mml:mi></mml:mrow><mml:mspace width="thinmathspace" /><mml:mi>o</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thinmathspace" /><mml:mrow><mml:mi mathvariant="italic">s</mml:mi><mml:mi 
mathvariant="italic">a</mml:mi><mml:mi mathvariant="italic">m</mml:mi><mml:mi mathvariant="italic">p</mml:mi><mml:mi mathvariant="italic">l</mml:mi><mml:mi mathvariant="italic">e</mml:mi><mml:mi mathvariant="italic">s</mml:mi></mml:mrow></mml:mrow></mml:mfrac><mml:mo>&#x2217;</mml:mo><mml:mn>100</mml:mn></mml:math></disp-formula></p>
</sec>
</sec>
<sec id="s4">
<label>4</label>
<title>Results and Discussion</title>
<p>The proposed model is simulated using the Python 3.6.5 tool. The proposed model was experimented on a PC with an i5-8600K CPU, GeForce 1050Ti 4 GB GPU, 16 GB RAM, 250 GB SSD, and 1 TB HDD. The parameter settings are given as follows: learning rate: 0.01, dropout: 0.5, batch size: 5, epoch count: 50, and activation: ReLU. In this section, the results examine the performance of the SCSODTL-SCC method on the ISIC dataset (available at <ext-link ext-link-type="uri" xlink:href="https://www.isic-archive.com">https://www.isic-archive.com</ext-link>) comprising 314 images, as depicted in <xref ref-type="table" rid="table-1">Table 1</xref>.</p>
<table-wrap id="table-1">
<label>Table 1</label>
<caption>
<title>Details of the dataset</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Class</th>
<th>No. of samples</th>
</tr>
</thead>
<tbody>
<tr>
<td>Angioma</td>
<td>21</td>
</tr>
<tr>
<td>Nevus</td>
<td>44</td>
</tr>
<tr>
<td>Lentigo NOS</td>
<td>40</td>
</tr>
<tr>
<td>Solar lentigo</td>
<td>67</td>
</tr>
<tr>
<td>Melanoma</td>
<td>51</td>
</tr>
<tr>
<td>Seborrheic keratosis</td>
<td>54</td>
</tr>
<tr>
<td>Basal cell carcinoma</td>
<td>37</td>
</tr>
<tr>
<td><bold>Total No. of samples</bold></td>
<td><bold>314</bold></td>
</tr>
</tbody>
</table>
</table-wrap>
<p><xref ref-type="fig" rid="fig-3">Fig. 3</xref> exhibits the classification outcomes of the SCSODTL-SCC method in terms of the confusion matrix. The results indicated that the SCSODTL-SCC method has accurately recognized different skin cancer classes under each aspect.</p>
<fig id="fig-3">
<label>Figure 3</label>
<caption>
<title>Confusion matrices of SCSODTL-SCC approach (a&#x2013;b) TR and TS databases of 80:20 and (c&#x2013;d) TR and TS databases of 70:30</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CSSE_38322-fig-3a.tif"/><graphic mimetype="image" mime-subtype="tif" xlink:href="CSSE_38322-fig-3b.tif"/>
</fig>
<p>In <xref ref-type="table" rid="table-2">Table 2</xref>, the skin cancer classification results of the SCSODTL-SCC method with 80:20 of TR/TS databases are provided. The figure shows the SCSODTL-SCC technique has reached enhanced results in every class. For example, on 80% of the TR database, the SCSODTL-SCC model has gained an average <inline-formula id="ieqn-50"><mml:math id="mml-ieqn-50"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 98.63%, <inline-formula id="ieqn-51"><mml:math id="mml-ieqn-51"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 94.66%, <inline-formula id="ieqn-52"><mml:math id="mml-ieqn-52"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 99.18%, <inline-formula id="ieqn-53"><mml:math id="mml-ieqn-53"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mi mathvariant="italic">e</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:math></inline-formula> of 95.20%, and an MCC of 94.43%. 
Along with that, on 20% of the TS database, the SCSODTL-SCC method has obtained an average <inline-formula id="ieqn-54"><mml:math id="mml-ieqn-54"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 98.64%, <inline-formula id="ieqn-55"><mml:math id="mml-ieqn-55"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 92.86%, <inline-formula id="ieqn-56"><mml:math id="mml-ieqn-56"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 99.09%, <inline-formula id="ieqn-57"><mml:math id="mml-ieqn-57"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mi mathvariant="italic">e</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:math></inline-formula> of 94.62%, and an MCC of 94.19%.</p>
<table-wrap id="table-2">
<label>Table 2</label>
<caption>
<title>Skin cancer classification outcome of SCSODTL-SCC approach on 80:20 of TR/TS databases</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Class</th>
<th>Accuracy</th>
<th>Sensitivity</th>
<th>Specificity</th>
<th>F-score</th>
<th>MCC</th>
</tr>
</thead>
<tbody>
<tr>
<td align="center" colspan="6">Training phase (80%)</td>
</tr>
<tr>
<td>Angioma</td>
<td>99.60</td>
<td>94.44</td>
<td>100.00</td>
<td>97.14</td>
<td>96.97</td>
</tr>
<tr>
<td>Nevus</td>
<td>98.80</td>
<td>95.12</td>
<td>99.52</td>
<td>96.30</td>
<td>95.59</td>
</tr>
<tr>
<td>Lentigo NOS</td>
<td>98.80</td>
<td>93.33</td>
<td>99.55</td>
<td>94.92</td>
<td>94.26</td>
</tr>
<tr>
<td>Solar lentigo</td>
<td>98.41</td>
<td>100.00</td>
<td>98.00</td>
<td>96.23</td>
<td>95.33</td>
</tr>
<tr>
<td>Melanoma</td>
<td>98.80</td>
<td>97.56</td>
<td>99.05</td>
<td>96.39</td>
<td>95.68</td>
</tr>
<tr>
<td>Seborrheic keratosis</td>
<td>97.61</td>
<td>92.86</td>
<td>98.56</td>
<td>92.86</td>
<td>91.42</td>
</tr>
<tr>
<td>Basal cell carcinoma</td>
<td>98.41</td>
<td>89.29</td>
<td>99.55</td>
<td>92.59</td>
<td>91.78</td>
</tr>
<tr>
<td><bold>Average</bold></td>
<td><bold>98.63</bold></td>
<td><bold>94.66</bold></td>
<td><bold>99.18</bold></td>
<td><bold>95.20</bold></td>
<td><bold>94.43</bold></td>
</tr>
<tr>
<td align="center" colspan="6">Testing phase (20%)</td>
</tr>
<tr>
<td>Angioma</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
</tr>
<tr>
<td>Nevus</td>
<td>98.41</td>
<td>66.67</td>
<td>100.00</td>
<td>80.00</td>
<td>80.98</td>
</tr>
<tr>
<td>Lentigo NOS</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
</tr>
<tr>
<td>Solar lentigo</td>
<td>95.24</td>
<td>100.00</td>
<td>93.62</td>
<td>91.43</td>
<td>88.79</td>
</tr>
<tr>
<td>Melanoma</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
</tr>
<tr>
<td>Seborrheic keratosis</td>
<td>96.83</td>
<td>83.33</td>
<td>100.00</td>
<td>90.91</td>
<td>89.55</td>
</tr>
<tr>
<td>Basal cell carcinoma</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
</tr>
<tr>
<td><bold>Average</bold></td>
<td><bold>98.64</bold></td>
<td><bold>92.86</bold></td>
<td><bold>99.09</bold></td>
<td><bold>94.62</bold></td>
<td><bold>94.19</bold></td>
</tr>
</tbody>
</table>
</table-wrap>
<p>In <xref ref-type="table" rid="table-3">Table 3</xref>, the skin cancer classification results of the SCSODTL-SCC method with 70:30 of TR/TS databases are provided. The table shows that the SCSODTL-SCC methodology has reached enhanced results in different class labels. For example, on 70% of the TR database, the SCSODTL-SCC approach has reached an average <inline-formula id="ieqn-58"><mml:math id="mml-ieqn-58"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 98.83%, <inline-formula id="ieqn-59"><mml:math id="mml-ieqn-59"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 95.31%, <inline-formula id="ieqn-60"><mml:math id="mml-ieqn-60"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 99.27%, <inline-formula id="ieqn-61"><mml:math id="mml-ieqn-61"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mi mathvariant="italic">e</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:math></inline-formula> of 95.90%, and an MCC of 95.22%. 
Also, on 30% of the TS database, the SCSODTL-SCC method has attained an average <inline-formula id="ieqn-62"><mml:math id="mml-ieqn-62"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 97.59%, <inline-formula id="ieqn-63"><mml:math id="mml-ieqn-63"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 90.70%, <inline-formula id="ieqn-64"><mml:math id="mml-ieqn-64"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 98.55%, <inline-formula id="ieqn-65"><mml:math id="mml-ieqn-65"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="italic">s</mml:mi><mml:mi mathvariant="italic">c</mml:mi><mml:mi mathvariant="italic">o</mml:mi><mml:mi mathvariant="italic">r</mml:mi><mml:mi mathvariant="italic">e</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:math></inline-formula> of 91.72%, and an MCC of 90.52%.</p>
<table-wrap id="table-3">
<label>Table 3</label>
<caption>
<title>Skin cancer classification outcome of SCSODTL-SCC approach on 70:30 of TR/TS databases</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Class</th>
<th>Accuracy</th>
<th>Sensitivity</th>
<th>Specificity</th>
<th>F-score</th>
<th>MCC</th>
</tr>
</thead>
<tbody>
<tr>
<td align="center" colspan="6">Training phase (70%)</td>
</tr>
<tr>
<td>Angioma</td>
<td>99.54</td>
<td>92.86</td>
<td>100.00</td>
<td>96.30</td>
<td>96.13</td>
</tr>
<tr>
<td>Nevus</td>
<td>99.09</td>
<td>96.88</td>
<td>99.47</td>
<td>96.88</td>
<td>96.34</td>
</tr>
<tr>
<td>Lentigo NOS</td>
<td>99.09</td>
<td>95.00</td>
<td>99.50</td>
<td>95.00</td>
<td>94.50</td>
</tr>
<tr>
<td>Solar lentigo</td>
<td>96.80</td>
<td>96.15</td>
<td>97.01</td>
<td>93.46</td>
<td>91.41</td>
</tr>
<tr>
<td>Melanoma</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
<td>100.00</td>
</tr>
<tr>
<td>Seborrheic keratosis</td>
<td>99.09</td>
<td>97.37</td>
<td>99.45</td>
<td>97.37</td>
<td>96.82</td>
</tr>
<tr>
<td>Basal cell carcinoma</td>
<td>98.17</td>
<td>88.89</td>
<td>99.48</td>
<td>92.31</td>
<td>91.36</td>
</tr>
<tr>
<td><bold>Average</bold></td>
<td><bold>98.83</bold></td>
<td><bold>95.31</bold></td>
<td><bold>99.27</bold></td>
<td><bold>95.90</bold></td>
<td><bold>95.22</bold></td>
</tr>
<tr>
<td align="center" colspan="6">Testing phase (30%)</td>
</tr>
<tr>
<td>Angioma</td>
<td>98.95</td>
<td>85.71</td>
<td>100.00</td>
<td>92.31</td>
<td>92.06</td>
</tr>
<tr>
<td>Nevus</td>
<td>97.89</td>
<td>83.33</td>
<td>100.00</td>
<td>90.91</td>
<td>90.21</td>
</tr>
<tr>
<td>Lentigo NOS</td>
<td>97.89</td>
<td>95.00</td>
<td>98.67</td>
<td>95.00</td>
<td>93.67</td>
</tr>
<tr>
<td>Solar lentigo</td>
<td>97.89</td>
<td>93.33</td>
<td>98.75</td>
<td>93.33</td>
<td>92.08</td>
</tr>
<tr>
<td>Melanoma</td>
<td>96.84</td>
<td>100.00</td>
<td>96.25</td>
<td>90.91</td>
<td>89.56</td>
</tr>
<tr>
<td>Seborrheic keratosis</td>
<td>94.74</td>
<td>87.50</td>
<td>96.20</td>
<td>84.85</td>
<td>81.72</td>
</tr>
<tr>
<td>Basal cell carcinoma</td>
<td>98.95</td>
<td>90.00</td>
<td>100.00</td>
<td>94.74</td>
<td>94.32</td>
</tr>
<tr>
<td><bold>Average</bold></td>
<td><bold>97.59</bold></td>
<td><bold>90.70</bold></td>
<td><bold>98.55</bold></td>
<td><bold>91.72</bold></td>
<td><bold>90.52</bold></td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The TACC and VACC of the SCSODTL-SCC methodology are examined on skin cancer classification performance in <xref ref-type="fig" rid="fig-4">Fig. 4</xref>. The results signified that the SCSODTL-SCC approach has improved performance with increased values of TACC and VACC. Notably, the SCSODTL-SCC approach has maximum TACC outcomes.</p>
<fig id="fig-4">
<label>Figure 4</label>
<caption>
<title>TACC and VACC analysis of SCSODTL-SCC approach</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CSSE_38322-fig-4.tif"/>
</fig>
<p>The TLS and VLS of the SCSODTL-SCC method are tested on skin cancer classification performance in <xref ref-type="fig" rid="fig-5">Fig. 5</xref>. The outcomes inferred that the SCSODTL-SCC technique had revealed better performance with the least values of TLS and VLS. The SCSODTL-SCC technique has reduced VLS outcomes.</p>
<fig id="fig-5">
<label>Figure 5</label>
<caption>
<title>TLS and VLS analysis of SCSODTL-SCC approach</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CSSE_38322-fig-5.tif"/>
</fig>
<p>A clear precision-recall investigation of the SCSODTL-SCC method under the test database is seen in <xref ref-type="fig" rid="fig-6">Fig. 6</xref>. The outcomes exhibited that the SCSODTL-SCC method has enhanced precision-recall values.</p>
<fig id="fig-6">
<label>Figure 6</label>
<caption>
<title>Precision-recall analysis of the SCSODTL-SCC approach</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CSSE_38322-fig-6.tif"/>
</fig>
<p>The detailed ROC investigation of the SCSODTL-SCC method under the test database is seen in <xref ref-type="fig" rid="fig-7">Fig. 7</xref>. The outcomes indicated that the SCSODTL-SCC approach has revealed its ability to classify distinct classes under the test database.</p>
<fig id="fig-7">
<label>Figure 7</label>
<caption>
<title>ROC curve analysis of SCSODTL-SCC approach</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CSSE_38322-fig-7.tif"/>
</fig>
<p><xref ref-type="table" rid="table-4">Table 4</xref> reports an overall comparison study of the proposed model with other recent approaches [<xref ref-type="bibr" rid="ref-21">21</xref>]. <xref ref-type="fig" rid="fig-8">Fig. 8</xref> shows a comparative investigation of the SCSODTL-SCC model in terms of <inline-formula id="ieqn-66"><mml:math id="mml-ieqn-66"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>.</p>
<table-wrap id="table-4">
<label>Table 4</label>
<caption>
<title>Comparative analysis of the SCSODTL-SCC method with other techniques</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Methods</th>
<th>Accuracy</th>
<th>Sensitivity</th>
<th>Specificity</th>
</tr>
</thead>
<tbody>
<tr>
<td>SCSODTL-SCC</td>
<td>98.83</td>
<td>95.31</td>
<td>99.27</td>
</tr>
<tr>
<td>DLCAL-SLDC</td>
<td>98.56</td>
<td>94.17</td>
<td>98.81</td>
</tr>
<tr>
<td>GNB model</td>
<td>95.14</td>
<td>93.27</td>
<td>95.14</td>
</tr>
<tr>
<td>DL-ANFC</td>
<td>97.54</td>
<td>93.24</td>
<td>98.63</td>
</tr>
<tr>
<td>CNN model</td>
<td>82.25</td>
<td>82.01</td>
<td>83.24</td>
</tr>
<tr>
<td>SVM model</td>
<td>74.35</td>
<td>72.8</td>
<td>75.72</td>
</tr>
<tr>
<td>RF model</td>
<td>88.56</td>
<td>86.88</td>
<td>85.72</td>
</tr>
<tr>
<td>DCCN-GC</td>
<td>93.07</td>
<td>90.46</td>
<td>92.59</td>
</tr>
</tbody>
</table>
</table-wrap><fig id="fig-8">
<label>Figure 8</label>
<caption>
<title><inline-formula id="ieqn-67"><mml:math id="mml-ieqn-67"><mml:mi>S</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> analysis of SCSODTL-SCC approach with other algorithms</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CSSE_38322-fig-8.tif"/>
</fig>
<p>The results indicated the SVM and CNN had reached reduced <inline-formula id="ieqn-68"><mml:math id="mml-ieqn-68"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 72.8% and 82.01%, while the RF and DCCN-GC models have reached slightly improved <inline-formula id="ieqn-69"><mml:math id="mml-ieqn-69"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 86.88% and 90.46%, respectively. Meanwhile, the DL-ANFC, GNB, and DLCAL-SLDC models have accomplished reasonable <inline-formula id="ieqn-70"><mml:math id="mml-ieqn-70"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> values of 93.24%, 93.27% and 94.17%, respectively. But the SCSODTL-SCC model has reported a maximum <inline-formula id="ieqn-71"><mml:math id="mml-ieqn-71"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 95.31%.</p>
<p><xref ref-type="fig" rid="fig-9">Fig. 9</xref> shows a comparative study of the SCSODTL-SCC model in terms of <inline-formula id="ieqn-72"><mml:math id="mml-ieqn-72"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>. The experimental results indicated the SVM and CNN had reached reduced <inline-formula id="ieqn-73"><mml:math id="mml-ieqn-73"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 75.72% and 83.24%, while the RF and DCCN-GC models have reached slightly improved <inline-formula id="ieqn-74"><mml:math id="mml-ieqn-74"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 85.72% and 92.59%, respectively. Meanwhile, the GNB, DL-ANFC and DLCAL-SLDC methods have accomplished reasonable <inline-formula id="ieqn-75"><mml:math id="mml-ieqn-75"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> values of 95.14%, 98.63% and 98.81%, correspondingly. But the SCSODTL-SCC model has reported a maximum <inline-formula id="ieqn-76"><mml:math id="mml-ieqn-76"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of 99.27%.</p>
<fig id="fig-9">
<label>Figure 9</label>
<caption>
<title><inline-formula id="ieqn-77"><mml:math id="mml-ieqn-77"><mml:mi>S</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> analysis of SCSODTL-SCC method with other algorithms</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CSSE_38322-fig-9.tif"/>
</fig>
<p>From these results, it is guaranteed that the SCSODTL-SCC method has reached enhanced skin cancer classification performance.</p>
</sec>
<sec id="s5">
<label>5</label>
<title>Conclusion</title>
<p>In this study, an automatic skin cancer classification using the SCSODTL-SCC technique has been developed on dermoscopic images. The goal of the SCSODTL-SCC model exists in the detection and classification of dissimilar kinds of skin cancer on dermoscopic images. Mainly, Dull razor approach-related hair removal and median filtering-based noise elimination are performed. Moreover, the U2Net segmentation approach is employed for the detection of infected lesion regions in dermoscopic images. Furthermore, the NASNetLarge-based feature extractor with the hybrid DBN model is used for classification. Finally, the classification performance can be improved by the SCSO algorithm for the hyperparameter tuning process. The simulation values of the SCSODTL-SCC model are inspected on the benchmark ISIC dataset. The comparative outcomes assured that the SCSODTL-SCC method had shown maximum skin cancer classification performance with maximum accuracy of 98.83%. In the future, the ensemble learning process can be employed to improve the classification results of the proposed model.</p>
</sec>
</body>
<back>
<ack>
<p>The authors are thankful for the seed grant support of Siddhartha Academy of General &#x0026; Technical Education and Velagapudi Ramakrishna Siddhartha Engineering College.</p>
</ack>
<sec><title>Funding Statement</title>
<p>This research was partly supported by the Technology Development Program of MSS [No. S3033853] and by the National University Development Project by the Ministry of Education in 2022.</p>
</sec>
<sec sec-type="COI-statement"><title>Conflicts of Interest</title>
<p>The authors declare that they have no conflicts of interest to report regarding the present study.</p>
</sec>
<ref-list content-type="authoryear">
<title>References</title>
<ref id="ref-1"><label>[1]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Dildar</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Akram</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Irfan</surname></string-name>, <string-name><given-names>H. U.</given-names> <surname>Khan</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Ramzan</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Skin cancer detection: A review using deep learning techniques</article-title>,&#x201D; <source>International Journal of Environmental Research and Public Health</source>, vol. <volume>18</volume>, no. <issue>10</issue>, pp. <fpage>5479</fpage>, <year>2021</year>; <pub-id pub-id-type="pmid">34065430</pub-id></mixed-citation></ref>
<ref id="ref-2"><label>[2]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Aljohani</surname></string-name> and <string-name><given-names>T.</given-names> <surname>Turki</surname></string-name></person-group>, &#x201C;<article-title>Automatic classification of melanoma skin cancer with deep convolutional neural networks</article-title>,&#x201D; <source>Artificial Intelligence</source>, vol. <volume>3</volume>, no. <issue>2</issue>, pp. <fpage>512</fpage>&#x2013;<lpage>525</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-3"><label>[3]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>H. C.</given-names> <surname>Reis</surname></string-name>, <string-name><given-names>V.</given-names> <surname>Turk</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Khoshelham</surname></string-name> and <string-name><given-names>S.</given-names> <surname>Kaya</surname></string-name></person-group>, &#x201C;<article-title>InSiNet: A deep convolutional approach to skin cancer detection and segmentation</article-title>,&#x201D; <source>Medical &#x0026; Biological Engineering &#x0026; Computing</source>, vol. <volume>60</volume>, no. <issue>3</issue>, pp. <fpage>643</fpage>&#x2013;<lpage>662</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-4"><label>[4]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N.</given-names> <surname>Kausar</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Hameed</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Sattar</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Ashraf</surname></string-name>, <string-name><given-names>A. S.</given-names> <surname>Imran</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Multiclass skin cancer classification using ensemble of fine-tuned deep learning models</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>11</volume>, no. <issue>22</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>20</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-5"><label>[5]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>T.</given-names> <surname>Saba</surname></string-name></person-group>, &#x201C;<article-title>Computer vision for microscopic skin cancer diagnosis using handcrafted and non-handcrafted features</article-title>,&#x201D; <source>Microscopy Research and Technique</source>, vol. <volume>84</volume>, no. <issue>6</issue>, pp. <fpage>1272</fpage>&#x2013;<lpage>1283</lpage>, <year>2021</year>; <pub-id pub-id-type="pmid">33399251</pub-id></mixed-citation></ref>
<ref id="ref-6"><label>[6]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. G.</given-names> <surname>Pacheco</surname></string-name> and <string-name><given-names>R. A.</given-names> <surname>Krohling</surname></string-name></person-group>, &#x201C;<article-title>An attention-based mechanism to combine images and metadata in deep learning models applied to skin cancer classification</article-title>,&#x201D; <source>IEEE Journal of Biomedical and Health Informatics</source>, vol. <volume>25</volume>, no. <issue>9</issue>, pp. <fpage>3554</fpage>&#x2013;<lpage>3563</lpage>, <year>2021</year>; <pub-id pub-id-type="pmid">33635800</pub-id></mixed-citation></ref>
<ref id="ref-7"><label>[7]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Fraiwan</surname></string-name> and <string-name><given-names>E.</given-names> <surname>Faouri</surname></string-name></person-group>, &#x201C;<article-title>On the automatic detection and classification of skin cancer using deep transfer learning</article-title>,&#x201D; <source>Sensors</source>, vol. <volume>22</volume>, no. <issue>13</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>16</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-8"><label>[8]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>T.</given-names> <surname>Saba</surname></string-name>, <string-name><given-names>M. A.</given-names> <surname>Khan</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Rehman</surname></string-name> and <string-name><given-names>S. L.</given-names> <surname>Marie-Sainte</surname></string-name></person-group>, &#x201C;<article-title>Region extraction and classification of skin cancer: A heterogeneous framework of deep CNN features fusion and reduction</article-title>,&#x201D; <source>Journal of Medical Systems</source>, vol. <volume>43</volume>, no. <issue>9</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>19</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-9"><label>[9]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. M.</given-names> <surname>Hemeida</surname></string-name>, <string-name><given-names>S. A.</given-names> <surname>Hassan</surname></string-name>, <string-name><given-names>A. A. A.</given-names> <surname>Mohamed</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Alkhalaf</surname></string-name>, <string-name><given-names>M. M.</given-names> <surname>Mahmoud</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Nature-inspired algorithms for feed-forward neural network classifiers: A survey of one decade of research</article-title>,&#x201D; <source>Ain Shams Engineering Journal</source>, vol. <volume>11</volume>, no. <issue>3</issue>, pp. <fpage>659</fpage>&#x2013;<lpage>675</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-10"><label>[10]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M. S.</given-names> <surname>Ali</surname></string-name>, <string-name><given-names>M. S.</given-names> <surname>Miah</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Haque</surname></string-name>, <string-name><given-names>M. M.</given-names> <surname>Rahman</surname></string-name> and <string-name><given-names>M. K.</given-names> <surname>Islam</surname></string-name></person-group>, &#x201C;<article-title>An enhanced technique of skin cancer classification using deep convolutional neural network with transfer learning models</article-title>,&#x201D; <source>Machine Learning with Applications</source>, vol. <volume>5</volume>, no. <issue>8</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>8</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-11"><label>[11]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>G.</given-names> <surname>Reshma</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Al-Atroshi</surname></string-name>, <string-name><given-names>V. K.</given-names> <surname>Nassa</surname></string-name>, <string-name><given-names>B.</given-names> <surname>Geetha</surname></string-name>, <string-name><given-names>G.</given-names> <surname>Sunitha</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Deep learning-based skin lesion diagnosis model using dermoscopic images</article-title>,&#x201D; <source>Intelligent Automation &#x0026; Soft Computing</source>, vol. <volume>31</volume>, no. <issue>1</issue>, pp. <fpage>621</fpage>&#x2013;<lpage>634</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-12"><label>[12]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S. M.</given-names> <surname>Alizadeh</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Mahloojifar</surname></string-name></person-group>, &#x201C;<article-title>Automatic skin cancer detection in dermoscopy images by combining convolutional neural networks and texture features</article-title>,&#x201D; <source>International Journal of Imaging Systems and Technology</source>, vol. <volume>31</volume>, no. <issue>2</issue>, pp. <fpage>695</fpage>&#x2013;<lpage>707</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-13"><label>[13]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>T. C.</given-names> <surname>Lin</surname></string-name> and <string-name><given-names>H. C.</given-names> <surname>Lee</surname></string-name></person-group>, &#x201C;<article-title>Skin cancer dermoscopy images classification with meta data via deep learning ensemble</article-title>,&#x201D; in <conf-name>Int. Computer Symp. (ICS)</conf-name>, <publisher-loc>Tainan, Taiwan</publisher-loc>, pp. <fpage>237</fpage>&#x2013;<lpage>241</lpage>, <year>2020</year>. </mixed-citation></ref>
<ref id="ref-14"><label>[14]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Shorfuzzaman</surname></string-name></person-group>, &#x201C;<article-title>An explainable stacked ensemble of deep learning models for improved melanoma skin cancer detection</article-title>,&#x201D; <source>Multimedia Systems</source>, vol. <volume>28</volume>, no. <issue>4</issue>, pp. <fpage>1309</fpage>&#x2013;<lpage>1323</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-15"><label>[15]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>R.</given-names> <surname>Kaur</surname></string-name>, <string-name><given-names>H.</given-names> <surname>GholamHosseini</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Sinha</surname></string-name> and <string-name><given-names>M.</given-names> <surname>Lind&#x00E9;n</surname></string-name></person-group>, &#x201C;<article-title>Melanoma classification using a novel deep convolutional neural network with dermoscopic images</article-title>,&#x201D; <source>Sensors</source>, vol. <volume>22</volume>, no. <issue>3</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>15</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-16"><label>[16]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S. S.</given-names> <surname>Chaturvedi</surname></string-name>, <string-name><given-names>J. V.</given-names> <surname>Tembhurne</surname></string-name> and <string-name><given-names>T.</given-names> <surname>Diwan</surname></string-name></person-group>, &#x201C;<article-title>A multi-class skin cancer classification using deep convolutional neural networks</article-title>,&#x201D; <source>Multimedia Tools and Applications</source>, vol. <volume>79</volume>, no. <issue>39</issue>, pp. <fpage>28477</fpage>&#x2013;<lpage>28498</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-17"><label>[17]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>SM.</given-names> <surname>J.</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Aravindan</surname></string-name> and <string-name><given-names>R.</given-names> <surname>Appavu</surname></string-name></person-group>, &#x201C;<article-title>Classification of skin cancer from dermoscopic images using deep neural network architectures</article-title>,&#x201D; <source>Multimedia Tools and Applications</source>, vol. <volume>82</volume>, pp. <fpage>1</fpage>&#x2013;<lpage>16</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-18"><label>[18]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>F.</given-names> <surname>Alrowais</surname></string-name>, <string-name><given-names>S. S.</given-names> <surname>Alotaibi</surname></string-name>, <string-name><given-names>F. N.</given-names> <surname>Al-Wesabi</surname></string-name>, <string-name><given-names>N.</given-names> <surname>Negm</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Alabdan</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Deep transfer learning enabled intelligent object detection for crowd density analysis on video surveillance systems</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>12</volume>, no. <issue>13</issue>, pp. <fpage>6665</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-19"><label>[19]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K. A.</given-names> <surname>Alissa</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Shaiba</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Gaddah</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Yafoz</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Alsini</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>Feature subset selection hybrid deep belief network based cybersecurity intrusion detection model</article-title>,&#x201D; <source>Electronics</source>, vol. <volume>11</volume>, no. <issue>19</issue>, pp. <fpage>3077</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-20"><label>[20]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Li</surname></string-name> and <string-name><given-names>G.</given-names> <surname>Wang</surname></string-name></person-group>, &#x201C;<article-title>Sand cat swarm optimization based on stochastic variation with elite collaboration</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>10</volume>, pp. <fpage>89989</fpage>&#x2013;<lpage>90003</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-21"><label>[21]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>D.</given-names> <surname>Adla</surname></string-name>, <string-name><given-names>G. V. R.</given-names> <surname>Reddy</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Nayak</surname></string-name> and <string-name><given-names>G.</given-names> <surname>Karuna</surname></string-name></person-group>, &#x201C;<article-title>Deep learning-based computer aided diagnosis model for skin cancer detection and classification</article-title>,&#x201D; <source>Distributed and Parallel Databases</source>, vol. <volume>40</volume>, no. <issue>4</issue>, pp. <fpage>717</fpage>&#x2013;<lpage>736</lpage>, <year>2022</year>.</mixed-citation></ref>
</ref-list>
</back></article>