<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "http://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="1.1">
<front>
<journal-meta>
<journal-id journal-id-type="pmc">IASC</journal-id>
<journal-id journal-id-type="nlm-ta">IASC</journal-id>
<journal-id journal-id-type="publisher-id">IASC</journal-id>
<journal-title-group>
<journal-title>Intelligent Automation &#x0026; Soft Computing</journal-title>
</journal-title-group>
<issn pub-type="epub">2326-005X</issn><issn pub-type="ppub">1079-8587</issn>
<publisher>
<publisher-name>Tech Science Press</publisher-name>
<publisher-loc>USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">19117</article-id>
<article-id pub-id-type="doi">10.32604/iasc.2022.019117</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Article</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Deep Learning-Based Skin Lesion Diagnosis Model Using Dermoscopic Images</article-title><alt-title alt-title-type="left-running-head">Deep Learning-Based Skin Lesion Diagnosis Model Using Dermoscopic Images</alt-title><alt-title alt-title-type="right-running-head">Deep Learning-Based Skin Lesion Diagnosis Model Using Dermoscopic Images</alt-title>
</title-group>
<contrib-group content-type="authors">
<contrib id="author-1" contrib-type="author" corresp="yes">
<name name-style="western">
<surname>Reshma</surname>
<given-names>G.</given-names>
</name>
<xref ref-type="aff" rid="aff-1">1</xref>
<email>greshma@pvpsiddhartha.ac.in</email>
</contrib>
<contrib id="author-2" contrib-type="author">
<name name-style="western">
<surname>Al-Atroshi</surname>
<given-names>Chiai</given-names>
</name>
<xref ref-type="aff" rid="aff-2">2</xref>
</contrib>
<contrib id="author-3" contrib-type="author">
<name name-style="western">
<surname>Nassa</surname>
<given-names>Vinay Kumar</given-names>
</name>
<xref ref-type="aff" rid="aff-3">3</xref>
</contrib>
<contrib id="author-4" contrib-type="author">
<name name-style="western">
<surname>Geetha</surname>
<given-names>B.T.</given-names>
</name>
<xref ref-type="aff" rid="aff-4">4</xref>
</contrib>
<contrib id="author-5" contrib-type="author">
<name name-style="western">
<surname>Sunitha</surname>
<given-names>Gurram</given-names>
</name>
<xref ref-type="aff" rid="aff-5">5</xref>
</contrib>
<contrib id="author-6" contrib-type="author">
<name name-style="western">
<surname>Galety</surname>
<given-names>Mohammad Gouse</given-names>
</name>
<xref ref-type="aff" rid="aff-6">6</xref>
</contrib>
<contrib id="author-7" contrib-type="author">
<name name-style="western">
<surname>Neelakandan</surname>
<given-names>S.</given-names>
</name>
<xref ref-type="aff" rid="aff-7">7</xref>
</contrib>
<aff id="aff-1">
<label>1</label><institution>Department of Information Technology, P. V. P. Siddhartha Institute of Technology</institution>, <addr-line>Vijayawada, 520007</addr-line>, <country>India</country></aff>
<aff id="aff-2">
<label>2</label><institution>Department of Education Counselling, College of Basic Education University of Duhok</institution>, <addr-line>Duhok, 44001</addr-line>, <country>Iraq</country></aff>
<aff id="aff-3">
<label>3</label><institution>Department of Computer Science &#x0026; Engineering, South Point Group of Institutions, Sonipat</institution>, <addr-line>Haryana, 131001</addr-line>, <country>India</country></aff>
<aff id="aff-4">
<label>4</label><institution>Department of ECE, Saveetha School of Engineering, SIMATS, Saveetha University</institution>, <addr-line>Tamil Nadu, 602105</addr-line>, <country>India</country></aff>
<aff id="aff-5">
<label>5</label><institution>Department of CSE, Sree Vidyanikethan Engineering College</institution>, <addr-line>Tirupati, 517102</addr-line>, <country>India</country></aff>
<aff id="aff-6">
<label>6</label><institution>Department of Information Technology, College of Engineering, Catholic University in Erbil</institution>, <addr-line>Kurdistan Region, 44001</addr-line>, <country>Iraq</country></aff>
<aff id="aff-7">
<label>7</label><institution>Department of Information Technology, Jeppiaar Institute of Technology</institution>, <addr-line>601201</addr-line>, <country>India</country></aff>
</contrib-group><author-notes><corresp id="cor1">&#x002A;Corresponding Author: G. Reshma. Email: <email>greshma@pvpsiddhartha.ac.in</email></corresp></author-notes>
<pub-date pub-type="epub" date-type="pub" iso-8601-date="2021-08-21">
<day>21</day>
<month>8</month>
<year>2021</year>
</pub-date>
<volume>31</volume>
<issue>1</issue>
<fpage>621</fpage>
<lpage>634</lpage>
<history>
<date date-type="received">
<day>02</day>
<month>4</month>
<year>2021</year>
</date>
<date date-type="accepted">
<day>11</day>
<month>6</month>
<year>2021</year>
</date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2021 Reshma et al.</copyright-statement>
<copyright-year>2021</copyright-year>
<copyright-holder>Reshma et al.</copyright-holder>
<license xlink:href="https://creativecommons.org/licenses/by/4.0/">
<license-p>This work is licensed under a <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="TSP_IASC_19117.pdf"></self-uri>
<abstract>
<p>In recent years, intelligent automation in the healthcare sector becomes more familiar due to the integration of artificial intelligence (AI) techniques. Intelligent healthcare systems assist in making better decisions, which further enables improved medical services to be provided to the patient. At the same time, skin lesion is a deadly disease that affects people of all age groups. Skin lesion segmentation and classification play a vital part in the earlier and precise skin cancer diagnosis by intelligent systems. However, the automated diagnosis of skin lesions in dermoscopic images is challenging because of the problems such as artifacts (hair, gel bubble, ruler marker), blurry boundary, poor contrast, and variable sizes and shapes of the lesion images. This study develops intelligent multilevel thresholding with deep learning (IMLT-DL) based skin lesion segmentation and classification model using dermoscopic images to address these problems. Primarily, the presented IMLT-DL model incorporates the Top hat filtering and inpainting technique for the pre-processing of the dermoscopic images. In addition, the Mayfly Optimization (MFO) with multilevel Kapur&#x2019;s thresholding-based segmentation process is involved in determining the infected regions. Besides, an Inception v3 based feature extractor is applied to derive a valuable set of feature vectors. Finally, the classification process is carried out using a gradient boosting tree (GBT) model. The presented model&#x2019;s performance takes place against the International Skin Imaging Collaboration (ISIC) dataset, and the experimental outcomes are inspected in different evaluation measures. The resultant experimental values ensure that the proposed IMLT-DL model outperforms the existing methods by achieving higher accuracy of 0.992.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>Intelligent models</kwd>
<kwd>computer-aided diagnosis</kwd>
<kwd>skin lesion</kwd>
<kwd>artificial intelligence</kwd>
<kwd>deep learning</kwd>
</kwd-group>
</article-meta>
</front>
<body>
<sec id="s1">
<label>1</label>
<title>Introduction</title>
<p>Skin cancer is a generally occurring kind of cancer over the globe [<xref ref-type="bibr" rid="ref-1">1</xref>]. Melanoma, squamous cell carcinoma, basal cell carcinoma, intraepithelial carcinoma, etc., are different kinds of skin cancers [<xref ref-type="bibr" rid="ref-2">2</xref>]. The human skin comprises three tissues, namely hypodermis, epidermis, and dermis. The epidermis has melanocytes that could create melanin at a highly unusual rate in any situation. For example, a long-term acquaintance of stronger ultraviolet radiation from light can cause melanin creation. The unusual development of melanocytes can cause a lethal kind of skin cancer [<xref ref-type="bibr" rid="ref-3">3</xref>]. The American Cancer Society in 2019 reported that it is anticipated that there would be around 96,480 new cases of melanoma and 7230 persons will be dead from the disease [<xref ref-type="bibr" rid="ref-4">4</xref>,<xref ref-type="bibr" rid="ref-5">5</xref>]. Earlier diagnoses of melanoma are essential for better treatment. When the melanoma was identified in the earlier phases, the 5-year survival rate becomes 92% [<xref ref-type="bibr" rid="ref-6">6</xref>].</p>
<p>Nevertheless, the resemblance between malignant and benign skin lesions is the central problem of melanoma detection. Consequently, detecting melanoma proves complicated even for skilled professionals. It is difficult to determine the lesion kind with the human eye.</p>
<p>In recent years, distinct imaging techniques were utilized to capture skin images. Dermoscopy is a non-invasive imaging method that enables the skin surface&#x2019;s visual image by the immersion fluid and light magnification device [<xref ref-type="bibr" rid="ref-7">7</xref>,<xref ref-type="bibr" rid="ref-8">8</xref>]. However, the simple visualization for identifying melanoma in skin lesions might be subjective, irreproducible, or inaccurate because of knowledge that depends on the specialist. The predictive outcome of melanoma from the dermoscopic images by non-professionals lies in the range of 75%&#x2013;84%. In order to resolve these problems that exist in the melanoma diagnosis process, Computer-Aided Diagnosis (CAD) methods are required for assisting the professionals with the analysis system. The processes involved in the CAD model for melanoma identification involve pre-processing, segmentation, feature extraction, and classification. To effectively identify a melanoma, lesion segmentation is a crucial phase in the CAD system, but it becomes difficult because of considerable variations in texture, size, color, and position of the skin lesions in dermoscopic images. Besides, additional features like hair, ebony frames, air bubbles, color illumination, ruler marks, and blood vessels cause additional challenges to the lesion segmentation. Several techniques were presented for the segmentation of skin lesions. In recent times, Convolutional Neural Network (CNN) is one of the deep learning (DL) techniques, which have attained effective outcomes in the CAD model [<xref ref-type="bibr" rid="ref-9">9</xref>]. Some of the DL architectures are AlexNet, MobileNet, ResNet, etc. In this study, the Inception model is employed due to the following reasons. The Inception model has low computational complexity, and fewer parameters are realized. 
Besides, it offers high-performance gain, effective utilization of computing resources with a slight increase in computation load for the high-performance output of an Inception network.</p>
<p>This study designs an Intelligent Multilevel Thresholding with Deep Learning (IMLT-DL) based skin lesion segmentation and classification model using dermoscopic images. Principally, the presented IMLT-DL model integrates the Top hat filtering and inpainting technique for the pre-processing of the dermoscopic images. Moreover, the Mayfly Optimization (MFO) with multilevel Kapur&#x2019;s thresholding-based segmentation process is involved in determining the infected regions. Also, an Inception v3 based feature extractor is applied to generate a meaningful collection of feature vectors from the segmented image. Lastly, a gradient boosting tree (GBT) model-based classification process is carried out to allocate proper class labels of the applied dermoscopic images. The proposed IMLT-DL model is simulated using International Skin Imaging Collaboration (ISIC) dataset and the experimental results are inspected under different evaluation measures. The paper&#x2019;s organization is given as follows: Section 2 reviews the state-of-the-art skin lesion segmentation techniques. Section 3 explains the proposed IMLT-DL model and Section 4 validates the simulation results. At last, the conclusion of the IMLT-DL model is drawn.</p>
</sec>
<sec id="s2">
<label>2</label>
<title>Literature Review</title>
<p>This section reviews some of the existing skin lesion segmentation and classification models. Jaisakthi et al. [<xref ref-type="bibr" rid="ref-10">10</xref>] summarized a semi-supervised technique by combining the Grabcut and K-means clustering methods for segmenting the skin lesions. First, the graph cuts are used to segment the melanoma, and then K-means clustering fine-tuned the boundary of the lesion. The pre-processing methods like noise removal and image normalization processes are utilized on the input image before being fed to the pixel classification process. Agrawal et al. [<xref ref-type="bibr" rid="ref-11">11</xref>] used the scale-invariant feature transform method for feature extraction. Madaan et al. [<xref ref-type="bibr" rid="ref-12">12</xref>] implemented convolutional neural networks for medical image classification. Similarly, Aljanabi et al. [<xref ref-type="bibr" rid="ref-13">13</xref>] presented an artificial bee colony (ABC) technique for segmenting lesions. The swarm-based method includes pre-processing of the digital image. Subsequently, it determines the optimal threshold value of the melanoma, by which the lesion is segmented using Otsu thresholding.</p>
<p>Pennisi et al. [<xref ref-type="bibr" rid="ref-14">14</xref>] presented a method that segments the image using the Delaunay triangulation method (DTM). This technique includes a parallel segmentation method, which creates two different images that are later combined to obtain the last lesion mask. The artifacts are removed from the images, and then one model filters the skin from the image for providing a binary mask of lesions. The DTM method is automatic and does not need a trained model, so it can be quicker than other techniques. Bi et al. [<xref ref-type="bibr" rid="ref-15">15</xref>] presented a novel automatic technique that executes image segmentation by image-wise supervised learning (ISL) and multi-scale super pixel-based cellular automata (MSCA). The researchers utilized probability maps for automatic seed selection, which removes the user-defined seed selection; subsequently, the MSCA method is applied to segment the skin lesions. Bi et al. [<xref ref-type="bibr" rid="ref-16">16</xref>] presented a Fully Convolutional Network (FCN) based technique to segment the skin lesion. The image features are learned from embedding the multi-stage of the FCN and attained an enhanced segmentation accuracy (compared to earlier tasks) of skin lesion without applying all pre-processing portion (for example, contrast improvement, hair removal, and so on).</p>
<p>Yuan [<xref ref-type="bibr" rid="ref-17">17</xref>] presented a convolution deconvolution neural network (CDNN) for automating the method to segment skin lesions. This method has concentrated on trained approaches, making it highly effective against the utilization of several pre and post-processing. This method creates the probability mapping in which the components correspond to the possibility of pixels going to melanoma. Berseth [<xref ref-type="bibr" rid="ref-18">18</xref>] proposed a U-Net framework to segment the skin lesions depending upon probability mapping of the image dimension, whereas the 10-fold cross-validation system is utilized for training this method. Paulraj [<xref ref-type="bibr" rid="ref-19">19</xref>] introduced a DL method to extract the lesion parts from the skin lesion.</p>
</sec>
<sec id="s3">
<label>3</label>
<title>The Proposed Intelligent Skin Lesion Diagnosis Model</title>
<p>The system architecture of the presented IMLT-DL model is illustrated in <xref ref-type="fig" rid="fig-1">Fig. 1</xref>. The figure shows that the IMLT-DL model diagnoses the skin lesion using different stages of operations such as pre-processing, segmentation, feature extraction, and classification. The detailed working of each operation is offered in the succeeding subsections.</p>
<sec id="s3_1">
<label>3.1</label>
<title>Image Pre-Processing</title>
<p>Initially, pre-processing of skin lesion images is performed in two stages, as defined below. Primarily, the format conversion and region of interest (RoI) detection processes are performed. As the existence of hair affects the detection and classification results, the hair removal process is carried out [<xref ref-type="bibr" rid="ref-20">20</xref>]. The RGB image is transformed into the grayscale image, and then the top hat filtering technique is utilized to identify the thick and dark hair in the dermoscopic images. The results obtained by the earlier processes comprise high variation among the input and output images, as given in <xref ref-type="disp-formula" rid="eqn-1">Eq. (1)</xref> below:</p>
<p><disp-formula id="eqn-1">
<label>(1)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-1.png"/><tex-math id="tex-eqn-1"><![$$CDATA[{Z_w}\left( G \right) = \; G \circ b\; - \; G$$]]></tex-math>--><mml:math id="mml-eqn-1" display="block"><mml:mrow><mml:msub><mml:mi>Z</mml:mi><mml:mi>w</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>G</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mspace width="thickmathspace"></mml:mspace><mml:mi>G</mml:mi><mml:mo>&#x2218;</mml:mo><mml:mi>b</mml:mi><mml:mspace width="thickmathspace"></mml:mspace><mml:mo>&#x2212;</mml:mo><mml:mspace width="thickmathspace"></mml:mspace><mml:mi>G</mml:mi></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where &#x25CB; signifies the closing function, G represents the grayscale input image and b designates the grayscale structuring element. Lastly, in the inpainting process, the hairline pixels are replaced with the nearby pixel values.</p>
<fig id="fig-1">
<label>Figure 1</label>
<caption>
<title>Overall process of proposed method</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_19117-fig-1.png"/>
</fig>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>MFO with Multilevel Thresholding-Based Segmentation</title>
<p>Once the dermoscopic input images are pre-processed, the MFO with multilevel Kapur&#x2019;s thresholding-based segmentation model is performed to determine the infected lesion regions in the dermoscopic images. Kapur et al. [<xref ref-type="bibr" rid="ref-21">21</xref>] presented an effective thresholding technique to determine the optimal thresholds for image segmentation. It mainly depends upon the entropy and thus the probability distribution of the image histogram. This technique computes the optimal (th) for the maximization of the overall entropy. In the case of bi-level thresholding, the objective function of Kapur&#x2019;s problem can be represented in <xref ref-type="disp-formula" rid="eqn-2">Eq. (2)</xref>:</p>
<p><disp-formula id="eqn-2">
<label>(2)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-2.png"/><tex-math id="tex-eqn-2"><![$$CDATA[{F_{kapur}}{\rm \; }\left( {th} \right) = {H_1} + {H_2}{\rm \; \; }$$]]></tex-math>--><mml:math id="mml-eqn-2" display="block"><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>k</mml:mi><mml:mi>a</mml:mi><mml:mi>p</mml:mi><mml:mi>u</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mrow><mml:mspace width="thickmathspace"></mml:mspace></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>t</mml:mi><mml:mi>h</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mrow><mml:msub><mml:mi>H</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:msub><mml:mi>H</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mrow><mml:mspace width="thickmathspace"></mml:mspace><mml:mspace width="thickmathspace"></mml:mspace></mml:mrow></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <inline-formula id="ieqn-1">
<!--<alternatives><inline-graphic xlink:href="ieqn-1.tif"/><tex-math id="tex-ieqn-1"><![$$CDATA[{H_1}$$]]></tex-math>--><mml:math id="mml-ieqn-1"><mml:mrow><mml:msub><mml:mi>H</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> and <inline-formula id="ieqn-2">
<!--<alternatives><inline-graphic xlink:href="ieqn-2.tif"/><tex-math id="tex-ieqn-2"><![$$CDATA[{H_2}$$]]></tex-math>--><mml:math id="mml-ieqn-2"><mml:mrow><mml:msub><mml:mi>H</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> can be computed as</p>
<p><disp-formula id="eqn-3">
<label>(3)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-3.png"/><tex-math id="tex-eqn-3"><![$$CDATA[{H_1} = \mathop \sum \nolimits_{i = 1}^{th} \displaystyle{{P{h_i}} \over {{\omega _0}}}ln\left( {\displaystyle{{P{h_i}} \over {{\omega _0}}}} \right)\; \; \;$$]]></tex-math>--><mml:math id="mml-eqn-3" display="block"><mml:mrow><mml:msub><mml:mi>H</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:msubsup><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mi>h</mml:mi></mml:mrow></mml:msubsup><mml:mstyle scriptlevel="0" displaystyle="true"><mml:mrow><mml:mfrac><mml:mrow><mml:mi>P</mml:mi><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mn>0</mml:mn></mml:msub></mml:mrow></mml:mrow></mml:mfrac></mml:mrow><mml:mi>l</mml:mi><mml:mi>n</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mstyle scriptlevel="0" displaystyle="true"><mml:mrow><mml:mfrac><mml:mrow><mml:mi>P</mml:mi><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mn>0</mml:mn></mml:msub></mml:mrow></mml:mrow></mml:mfrac></mml:mrow></mml:mstyle></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mspace width="thickmathspace"></mml:mspace><mml:mspace width="thickmathspace"></mml:mspace><mml:mspace width="thickmathspace"></mml:mspace></mml:mstyle></mml:math>
<!--</alternatives>--></disp-formula></p>
<p><disp-formula id="eqn-4">
<label>(4)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-4.png"/><tex-math id="tex-eqn-4"><![$$CDATA[{H_2} = \mathop \sum \nolimits_{i = th + 1}^L \displaystyle{{P{h_i}} \over {{\omega _1}}}ln\left( {\displaystyle{{P{h_i}} \over {{\omega _1}}}} \right)$$]]></tex-math>--><mml:math id="mml-eqn-4" display="block"><mml:mrow><mml:msub><mml:mi>H</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:msubsup><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mo>&#x002B;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>L</mml:mi></mml:msubsup><mml:mstyle scriptlevel="0" displaystyle="true"><mml:mrow><mml:mfrac><mml:mrow><mml:mi>P</mml:mi><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow></mml:mrow></mml:mfrac></mml:mrow><mml:mi>l</mml:mi><mml:mi>n</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mstyle scriptlevel="0" displaystyle="true"><mml:mrow><mml:mfrac><mml:mrow><mml:mi>P</mml:mi><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow></mml:mrow></mml:mfrac></mml:mrow></mml:mstyle></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mstyle></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <inline-formula id="ieqn-3">
<!--<alternatives><inline-graphic xlink:href="ieqn-3.tif"/><tex-math id="tex-ieqn-3"><![$$CDATA[P{h_i}$$]]></tex-math>--><mml:math id="mml-ieqn-3"><mml:mi>P</mml:mi><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> is the probability distribution of the intensity level, <inline-formula id="ieqn-4">
<!--<alternatives><inline-graphic xlink:href="ieqn-4.tif"/><tex-math id="tex-ieqn-4"><![$$CDATA[{\omega _0}$$]]></tex-math>--><mml:math id="mml-ieqn-4"><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mn>0</mml:mn></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> (th) and <inline-formula id="ieqn-5">
<!--<alternatives><inline-graphic xlink:href="ieqn-5.tif"/><tex-math id="tex-ieqn-5"><![$$CDATA[{\omega _1}$$]]></tex-math>--><mml:math id="mml-ieqn-5"><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> (th) are probability distribution of the class labels <inline-formula id="ieqn-6">
<!--<alternatives><inline-graphic xlink:href="ieqn-6.tif"/><tex-math id="tex-ieqn-6"><![$$CDATA[{H_{\rm l}}$$]]></tex-math>--><mml:math id="mml-ieqn-6"><mml:mrow><mml:msub><mml:mi>H</mml:mi><mml:mrow><mml:mi mathvariant="normal">l</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> and <inline-formula id="ieqn-7">
<!--<alternatives><inline-graphic xlink:href="ieqn-7.tif"/><tex-math id="tex-ieqn-7"><![$$CDATA[{H_2}$$]]></tex-math>--><mml:math id="mml-ieqn-7"><mml:mrow><mml:msub><mml:mi>H</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> as shown in <xref ref-type="disp-formula" rid="eqn-3">Eqs. (3)</xref> and <xref ref-type="disp-formula" rid="eqn-4">(4)</xref>. This entropy-based technique can be extended for multilevel thresholding values. For example, it is essential to divide the images into <inline-formula id="ieqn-8">
<!--<alternatives><inline-graphic xlink:href="ieqn-8.tif"/><tex-math id="tex-ieqn-8"><![$$CDATA[k$$]]></tex-math>--><mml:math id="mml-ieqn-8"><mml:mi>k</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> class labels using <inline-formula id="ieqn-9">
<!--<alternatives><inline-graphic xlink:href="ieqn-9.tif"/><tex-math id="tex-ieqn-9"><![$$CDATA[k - 1$$]]></tex-math>--><mml:math id="mml-ieqn-9"><mml:mi>k</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:math>
<!--</alternatives>--></inline-formula> threshold values [<xref ref-type="bibr" rid="ref-22">22</xref>]. The objective function can be altered using <xref ref-type="disp-formula" rid="eqn-5">Eq. (5)</xref>:</p>
<p><disp-formula id="eqn-5">
<label>(5)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-5.png"/><tex-math id="tex-eqn-5"><![$$CDATA[{F_{kapur}}{\rm \; }\left( {TH} \right) = \mathop \sum \nolimits_{i = {\rm l}}^k {H_i}$$]]></tex-math>--><mml:math id="mml-eqn-5" display="block"><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>k</mml:mi><mml:mi>a</mml:mi><mml:mi>p</mml:mi><mml:mi>u</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mrow><mml:mspace width="thickmathspace"></mml:mspace></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>T</mml:mi><mml:mi>H</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:msubsup><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mrow><mml:mi mathvariant="normal">l</mml:mi></mml:mrow></mml:mrow><mml:mi>k</mml:mi></mml:msubsup><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:msub><mml:mi>H</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <inline-formula id="ieqn-10">
<!--<alternatives><inline-graphic xlink:href="ieqn-10.tif"/><tex-math id="tex-ieqn-10"><![$$CDATA[TH =$$]]></tex-math>--><mml:math id="mml-ieqn-10"><mml:mi>T</mml:mi><mml:mi>H</mml:mi><mml:mo>&#x003D;</mml:mo></mml:math>
<!--</alternatives>--></inline-formula> <inline-formula id="ieqn-11">
<!--<alternatives><inline-graphic xlink:href="ieqn-11.tif"/><tex-math id="tex-ieqn-11"><![$$CDATA[\left[ {th\_1,\; th\_2,\; \ldots ,\; th\_\left( {k - 1} \right)} \right]\;$$]]></tex-math>--><mml:math id="mml-ieqn-11"><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi mathvariant="normal">_</mml:mi><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mspace width="thickmathspace"></mml:mspace><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi mathvariant="normal">_</mml:mi><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mspace width="thickmathspace"></mml:mspace><mml:mo>&#x2026;</mml:mo><mml:mo>,</mml:mo><mml:mspace width="thickmathspace"></mml:mspace><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi mathvariant="normal">_</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>k</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>]</mml:mo></mml:mrow><mml:mspace width="thickmathspace"></mml:mspace></mml:math>
<!--</alternatives>--></inline-formula> is a vector comprising multiple threshold values. All the entropies are determined individually with the corresponding (<inline-formula id="ieqn-12">
<!--<alternatives><inline-graphic xlink:href="ieqn-12.tif"/><tex-math id="tex-ieqn-12"><![$$CDATA[th$$]]></tex-math>--><mml:math id="mml-ieqn-12"><mml:mi>t</mml:mi><mml:mi>h</mml:mi></mml:math>
<!--</alternatives>--></inline-formula>) value, so <xref ref-type="disp-formula" rid="eqn-6">Eq. (6)</xref> is extended for <inline-formula id="ieqn-13">
<!--<alternatives><inline-graphic xlink:href="ieqn-13.tif"/><tex-math id="tex-ieqn-13"><![$$CDATA[k$$]]></tex-math>--><mml:math id="mml-ieqn-13"><mml:mi>k</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> entropy. <xref ref-type="fig" rid="fig-2">Fig. 2</xref> demonstrates the flowchart of the MFO technique.</p>
<fig id="fig-2">
<label>Figure 2</label>
<caption>
<title>Flowchart of the MFO technique</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_19117-fig-2.png"/>
</fig>
<p><disp-formula id="eqn-6">
<label>(6)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-6.png"/><tex-math id="tex-eqn-6"><![$$CDATA[H_k^c = \mathop \sum \nolimits_{i = t{h_{k + 1}}}^L \displaystyle{{P{h_i}} \over {{\omega _{k - {\rm l}}}}}ln\left( {\displaystyle{{P{h_i}} \over {{\omega _{k - 1}}}}} \right)$$]]></tex-math>--><mml:math id="mml-eqn-6" display="block"><mml:msubsup><mml:mi>H</mml:mi><mml:mi>k</mml:mi><mml:mi>c</mml:mi></mml:msubsup><mml:mo>&#x003D;</mml:mo><mml:msubsup><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mi>t</mml:mi><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mrow><mml:mi>k</mml:mi><mml:mo>&#x002B;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mi>L</mml:mi></mml:msubsup><mml:mstyle scriptlevel="0" displaystyle="true"><mml:mrow><mml:mfrac><mml:mrow><mml:mi>P</mml:mi><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mrow><mml:mi>k</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mi mathvariant="normal">l</mml:mi></mml:mrow></mml:mrow></mml:msub></mml:mrow></mml:mrow></mml:mfrac></mml:mrow><mml:mi>l</mml:mi><mml:mi>n</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mstyle scriptlevel="0" displaystyle="true"><mml:mrow><mml:mfrac><mml:mrow><mml:mi>P</mml:mi><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mrow><mml:mi>k</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow></mml:mrow></mml:mfrac></mml:mrow></mml:mstyle></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mstyle></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where the values of the probability occurrence <inline-formula id="ieqn-14">
<!--<alternatives><inline-graphic xlink:href="ieqn-14.tif"/><tex-math id="tex-ieqn-14"><![$$CDATA[\left( {\omega _0^c,{\rm \; }{\omega _1},{\rm \; } \ldots ,{\rm \; }{\omega _{k - 1}}} \right)$$]]></tex-math>--><mml:math id="mml-ieqn-14"><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msubsup><mml:mi>&#x03C9;</mml:mi><mml:mn>0</mml:mn><mml:mi>c</mml:mi></mml:msubsup><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace"></mml:mspace></mml:mrow><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace"></mml:mspace></mml:mrow><mml:mo>&#x2026;</mml:mo><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace"></mml:mspace></mml:mrow><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mrow><mml:mi>k</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> of the <inline-formula id="ieqn-15">
<!--<alternatives><inline-graphic xlink:href="ieqn-15.tif"/><tex-math id="tex-ieqn-15"><![$$CDATA[k$$]]></tex-math>--><mml:math id="mml-ieqn-15"><mml:mi>k</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> class levels are attained. For the optimal selection of multiple threshold values, the MFO algorithm is applied.</p>
<p>The MFO algorithm is inspired by the flight behavior and mating process of the mayflies [<xref ref-type="bibr" rid="ref-23">23</xref>]. In the MFO algorithm, the individuals in swarms are particularly recognized as male and female mayflies. The male MFs are generally robust and result in improved optimization. The MFO algorithm updates the positions based on the existing positions <inline-formula id="ieqn-16">
<!--<alternatives><inline-graphic xlink:href="ieqn-16.tif"/><tex-math id="tex-ieqn-16"><![$$CDATA[{p_i}\left( t \right)$$]]></tex-math>--><mml:math id="mml-ieqn-16"><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> and velocity <inline-formula id="ieqn-17">
<!--<alternatives><inline-graphic xlink:href="ieqn-17.tif"/><tex-math id="tex-ieqn-17"><![$$CDATA[{v_i}\left( t \right)$$]]></tex-math>--><mml:math id="mml-ieqn-17"><mml:mrow><mml:msub><mml:mi>v</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> at the present round:</p>
<p><disp-formula id="eqn-7">
<label>(7)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-7.png"/><tex-math id="tex-eqn-7"><![$$CDATA[{p_i}\left( {t + 1} \right) = {p_i}\left( t \right) + {v_i}\left( {t + 1} \right)\;$$]]></tex-math>--><mml:math id="mml-eqn-7" display="block"><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x002B;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:msub><mml:mi>v</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x002B;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mspace width="thickmathspace"></mml:mspace></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>Every male and female MFs update the respective position using <xref ref-type="disp-formula" rid="eqn-7">Eq. (7)</xref>. However, the MFs involve distinct velocity updating characteristics.</p>
<sec id="s3_2_1">
<label>3.2.1</label>
<title>Movement of Male MFs</title>
<p>Male MFs in the swarm perform exploration or exploitation processes over iterations. The velocity gets updated based on the present fitness values <inline-formula id="ieqn-18">
<!--<alternatives><inline-graphic xlink:href="ieqn-18.tif"/><tex-math id="tex-ieqn-18"><![$$CDATA[\left( {of\; {x_i}} \right)$$]]></tex-math>--><mml:math id="mml-ieqn-18"><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>o</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thickmathspace"></mml:mspace><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> and the past optimal fitness value in trajectory <inline-formula id="ieqn-19">
<!--<alternatives><inline-graphic xlink:href="ieqn-19.tif"/><tex-math id="tex-ieqn-19"><![$$CDATA[f\left( {{x_{{h_i}}}} \right)$$]]></tex-math>--><mml:math id="mml-ieqn-19"><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula>. When <inline-formula id="ieqn-20">
<!--<alternatives><inline-graphic xlink:href="ieqn-20.tif"/><tex-math id="tex-ieqn-20"><![$$CDATA[f\left( {{x_i}} \right) > f\left( {{x_{{h_i}}}} \right)$$]]></tex-math>--><mml:math id="mml-ieqn-20"><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003E;</mml:mo><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula>, the male MFs update the velocity based on the current velocity along with the distances from their past optimal trajectory and from the gbest position:</p>
<p><disp-formula id="eqn-8">
<label>(8)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-8.png"/><tex-math id="tex-eqn-8"><![$$CDATA[{v_i}\left( {t + 1} \right) = {\rm g} \cdot {v_i}\left( t \right) + {a_1}{e^{ - \beta r_p^2}}{\rm \; }\left[ {{x_{{h_i}}} - {x_i}\left( t \right)} \right] + {a_2}{e^{ - \beta r_q^2}},{\rm \; }\left[ {{x_g} - {x_i}\left( t \right)} \right]\;$$]]></tex-math>--><mml:math id="mml-eqn-8" display="block"><mml:mrow><mml:msub><mml:mi>v</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x002B;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mrow><mml:mi mathvariant="normal">g</mml:mi></mml:mrow><mml:msup><mml:mtext> </mml:mtext><mml:mo>.</mml:mo></mml:msup><mml:mtext> </mml:mtext><mml:mrow><mml:msub><mml:mi>v</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:msub><mml:mi>a</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mrow><mml:msup><mml:mi>e</mml:mi><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>&#x03B2;</mml:mi><mml:msubsup><mml:mi>r</mml:mi><mml:mi>p</mml:mi><mml:mn>2</mml:mn></mml:msubsup></mml:mrow></mml:msup></mml:mrow><mml:mrow><mml:mspace 
width="thickmathspace"></mml:mspace></mml:mrow><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>]</mml:mo></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:msub><mml:mi>a</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mrow><mml:msup><mml:mi>e</mml:mi><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>&#x03B2;</mml:mi><mml:msubsup><mml:mi>r</mml:mi><mml:mi>q</mml:mi><mml:mn>2</mml:mn></mml:msubsup></mml:mrow></mml:msup></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mspace width="thickmathspace"></mml:mspace></mml:mrow><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>]</mml:mo></mml:mrow><mml:mspace width="thickmathspace"></mml:mspace></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <inline-formula id="ieqn-21">
<!--<alternatives><inline-graphic xlink:href="ieqn-21.tif"/><tex-math id="tex-ieqn-21"><![$$CDATA[g$$]]></tex-math>--><mml:math id="mml-ieqn-21"><mml:mi>g</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> is a variable reduced from maximum to 1 in a linear way. <inline-formula id="ieqn-22">
<!--<alternatives><inline-graphic xlink:href="ieqn-22.tif"/><tex-math id="tex-ieqn-22"><![$$CDATA[{a_1},$$]]></tex-math>--><mml:math id="mml-ieqn-22"><mml:mrow><mml:msub><mml:mi>a</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo>,</mml:mo></mml:math>
<!--</alternatives>--></inline-formula> <inline-formula id="ieqn-23">
<!--<alternatives><inline-graphic xlink:href="ieqn-23.tif"/><tex-math id="tex-ieqn-23"><![$$CDATA[{a_2}$$]]></tex-math>--><mml:math id="mml-ieqn-23"><mml:mrow><mml:msub><mml:mi>a</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula>, and <inline-formula id="ieqn-24">
<!--<alternatives><inline-graphic xlink:href="ieqn-24.tif"/><tex-math id="tex-ieqn-24"><![$$CDATA[\beta$$]]></tex-math>--><mml:math id="mml-ieqn-24"><mml:mi>&#x03B2;</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> are the constants. <inline-formula id="ieqn-25">
<!--<alternatives><inline-graphic xlink:href="ieqn-25.tif"/><tex-math id="tex-ieqn-25"><![$$CDATA[{r_{\rm p}}$$]]></tex-math>--><mml:math id="mml-ieqn-25"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi mathvariant="normal">p</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> and <inline-formula id="ieqn-26">
<!--<alternatives><inline-graphic xlink:href="ieqn-26.tif"/><tex-math id="tex-ieqn-26"><![$$CDATA[{r_g}$$]]></tex-math>--><mml:math id="mml-ieqn-26"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> are two parameters denoting the Cartesian distances between the individuals and their past optimal positions and the gbest position in the swarm, respectively. The Cartesian distance is the 2<sup>nd</sup> norm of the distance array:</p>
<p><disp-formula id="eqn-9">
<label>(9)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-9.png"/><tex-math id="tex-eqn-9"><![$$CDATA[\left| {\left| {{x_i} - {x_j}} \right|} \right| = \sqrt {\mathop \sum \nolimits_{k = 1}^n {{\left( {{x_{ik}} - {x_{jk}}} \right)}^2}} \;$$]]></tex-math>--><mml:math id="mml-eqn-9" display="block"><mml:mrow><mml:mo>|</mml:mo><mml:mrow><mml:mrow><mml:mo>|</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>|</mml:mo></mml:mrow></mml:mrow><mml:mo>|</mml:mo></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:msqrt><mml:msubsup><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>k</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>n</mml:mi></mml:msubsup><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>k</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mi>k</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:msqrt><mml:mspace width="thickmathspace"></mml:mspace></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>At the same time, when <inline-formula id="ieqn-27">
<!--<alternatives><inline-graphic xlink:href="ieqn-27.tif"/><tex-math id="tex-ieqn-27"><![$$CDATA[f\left( {{x_i}} \right) < f\left( {{x_{{h_i}}}} \right)$$]]></tex-math>--><mml:math id="mml-ieqn-27"><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003C;</mml:mo><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula>, the male MFs update the velocities from the present ones with a random dance coefficient <inline-formula id="ieqn-28">
<!--<alternatives><inline-graphic xlink:href="ieqn-28.tif"/><tex-math id="tex-ieqn-28"><![$$CDATA[d$$]]></tex-math>--><mml:math id="mml-ieqn-28"><mml:mi>d</mml:mi></mml:math>
<!--</alternatives>--></inline-formula>:</p>
<p><disp-formula id="eqn-10">
<label>(10)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-10.png"/><tex-math id="tex-eqn-10"><![$$CDATA[{v_i}\left( {t + 1} \right) = g \cdot {v_i}\left( t \right) + d \cdot {r_1}\; \; \; \;$$]]></tex-math>--><mml:math id="mml-eqn-10" display="block"><mml:mrow><mml:msub><mml:mi>v</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x002B;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mi>g</mml:mi><mml:msup><mml:mtext> </mml:mtext><mml:mo>.</mml:mo></mml:msup><mml:mtext> </mml:mtext><mml:mrow><mml:msub><mml:mi>v</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mi>d</mml:mi><mml:msup><mml:mtext> </mml:mtext><mml:mo>.</mml:mo></mml:msup><mml:mtext> </mml:mtext><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mspace width="thickmathspace"></mml:mspace><mml:mspace width="thickmathspace"></mml:mspace><mml:mspace width="thickmathspace"></mml:mspace><mml:mspace width="thickmathspace"></mml:mspace></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <inline-formula id="ieqn-29">
<!--<alternatives><inline-graphic xlink:href="ieqn-29.tif"/><tex-math id="tex-ieqn-29"><![$$CDATA[{r_1}$$]]></tex-math>--><mml:math id="mml-ieqn-29"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> is a random number drawn from a uniform distribution.</p>
</sec>
<sec id="s3_2_2">
<label>3.2.2</label>
<title>Movement of Female MFs</title>
<p>The female MFs update the velocity in various ways. The female MFs with wings only endure for 1&#x2013;7 days. Therefore, the female MFs rush to identify the male MFs for mating and reproduction. So, the velocity gets updated depending upon the male MFs they wish to mate with. Here, the topmost female and male MFs are considered the first pair of mates, the next optimal female and male MFs are treated as the second pair, etc. Therefore, for the <italic>i</italic>-th female mayfly, when <inline-formula id="ieqn-30">
<!--<alternatives><inline-graphic xlink:href="ieqn-30.tif"/><tex-math id="tex-ieqn-30"><![$$CDATA[f\left( {{y_i}} \right) < f\left( {{x_i}} \right)$$]]></tex-math>--><mml:math id="mml-ieqn-30"><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>y</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003C;</mml:mo><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula>:</p>
<p><disp-formula id="eqn-11">
<label>(11)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-11.png"/><tex-math id="tex-eqn-11"><![$$CDATA[{v_i}\left( {t + 1} \right) = g \cdot {v_i}\left( t \right) + {a_3}{e^{ - \beta r_{mf}^2}}{\rm \; }\left[ {{x_i}\left( t \right) - {y_i}\left( t \right)} \right]$$]]></tex-math>--><mml:math id="mml-eqn-11" display="block"><mml:mrow><mml:msub><mml:mi>v</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x002B;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mi>g</mml:mi><mml:msup><mml:mtext> </mml:mtext><mml:mo>.</mml:mo></mml:msup><mml:mtext> </mml:mtext><mml:mrow><mml:msub><mml:mi>v</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:msub><mml:mi>a</mml:mi><mml:mn>3</mml:mn></mml:msub></mml:mrow><mml:mrow><mml:msup><mml:mi>e</mml:mi><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>&#x03B2;</mml:mi><mml:msubsup><mml:mi>r</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>f</mml:mi></mml:mrow><mml:mn>2</mml:mn></mml:msubsup></mml:mrow></mml:msup></mml:mrow><mml:mrow><mml:mspace width="thickmathspace"></mml:mspace></mml:mrow><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>y</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <inline-formula id="ieqn-31">
<!--<alternatives><inline-graphic xlink:href="ieqn-31.tif"/><tex-math id="tex-ieqn-31"><![$$CDATA[{a_3}$$]]></tex-math>--><mml:math id="mml-ieqn-31"><mml:mrow><mml:msub><mml:mi>a</mml:mi><mml:mn>3</mml:mn></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> represents the constant employed for balancing the velocity and <inline-formula id="ieqn-32">
<!--<alternatives><inline-graphic xlink:href="ieqn-32.tif"/><tex-math id="tex-ieqn-32"><![$$CDATA[{r_m}$$]]></tex-math>--><mml:math id="mml-ieqn-32"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> denotes the Cartesian distance among them. Contrastingly, when <inline-formula id="ieqn-33">
<!--<alternatives><inline-graphic xlink:href="ieqn-33.tif"/><tex-math id="tex-ieqn-33"><![$$CDATA[f\left( {{y_i}} \right) > f\left( {{x_i}} \right)$$]]></tex-math>--><mml:math id="mml-ieqn-33"><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>y</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003E;</mml:mo><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula>, the female MFs update the velocity from the existing one with another arbitrary dance <inline-formula id="ieqn-34">
<!--<alternatives><inline-graphic xlink:href="ieqn-34.tif"/><tex-math id="tex-ieqn-34"><![$$CDATA[fl$$]]></tex-math>--><mml:math id="mml-ieqn-34"><mml:mi>f</mml:mi><mml:mi>l</mml:mi></mml:math>
<!--</alternatives>--></inline-formula>:</p>
<p><disp-formula id="eqn-12">
<label>(12)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-12.png"/><tex-math id="tex-eqn-12"><![$$CDATA[{v_i}\left( t \right) = g \cdot {v_i}\left( t \right) + fl \cdot {r_2}\; \; \;$$]]></tex-math>--><mml:math id="mml-eqn-12" display="block"><mml:mrow><mml:msub><mml:mi>v</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mi>g</mml:mi><mml:msup><mml:mtext> </mml:mtext><mml:mo>.</mml:mo></mml:msup><mml:mtext> </mml:mtext><mml:mrow><mml:msub><mml:mi>v</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mi>f</mml:mi><mml:mi>l</mml:mi><mml:msup><mml:mtext> </mml:mtext><mml:mo>.</mml:mo></mml:msup><mml:mtext> </mml:mtext><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mspace width="thickmathspace"></mml:mspace><mml:mspace width="thickmathspace"></mml:mspace><mml:mspace width="thickmathspace"></mml:mspace></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <inline-formula id="ieqn-35">
<!--<alternatives><inline-graphic xlink:href="ieqn-35.tif"/><tex-math id="tex-ieqn-35"><![$$CDATA[{r_2}$$]]></tex-math>--><mml:math id="mml-ieqn-35"><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> is a random number drawn from a uniform distribution.</p>
</sec>
<sec id="s3_2_3">
<label>3.2.3</label>
<title>MFs Mating</title>
<p>The top half of the male and female MFs undergo mating and reproduce offspring. The offspring are randomly generated from the respective parents as defined below:</p>
<p><disp-formula id="eqn-13">
<label>(13)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-13.png"/><tex-math id="tex-eqn-13"><![$$CDATA[offspring1 = L*male + \left( {1 - L} \right)*female\;$$]]></tex-math>--><mml:math id="mml-eqn-13" display="block"><mml:mi>o</mml:mi><mml:mi>f</mml:mi><mml:mi>f</mml:mi><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>g</mml:mi><mml:mn>1</mml:mn><mml:mo>&#x003D;</mml:mo><mml:mi>L</mml:mi><mml:mo>&#x2217;</mml:mo><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>e</mml:mi><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mi>L</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2217;</mml:mo><mml:mi>f</mml:mi><mml:mi>e</mml:mi><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>e</mml:mi><mml:mspace width="thickmathspace"></mml:mspace></mml:math>
<!--</alternatives>--></disp-formula></p>
<p><disp-formula id="eqn-14">
<label>(14)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-14.png"/><tex-math id="tex-eqn-14"><![$$CDATA[offspring2 = L*female + \left( {1 - L} \right)*male\; \; \;$$]]></tex-math>--><mml:math id="mml-eqn-14" display="block"><mml:mi>o</mml:mi><mml:mi>f</mml:mi><mml:mi>f</mml:mi><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>g</mml:mi><mml:mn>2</mml:mn><mml:mo>&#x003D;</mml:mo><mml:mi>L</mml:mi><mml:mo>&#x2217;</mml:mo><mml:mi>f</mml:mi><mml:mi>e</mml:mi><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>e</mml:mi><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mi>L</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2217;</mml:mo><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>e</mml:mi><mml:mspace width="thickmathspace"></mml:mspace><mml:mspace width="thickmathspace"></mml:mspace><mml:mspace width="thickmathspace"></mml:mspace></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where L is a random number drawn from a Gaussian distribution.</p>
</sec>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Feature Extraction</title>
<p>The segmented image is passed into the Inception v3 model during feature extraction, which has generated a meaningful set of feature vectors. Krizhevsky et al. [<xref ref-type="bibr" rid="ref-24">24</xref>] proposed the AlexNet model for object recognition and classification, and it has achieved improved performance. Subsequently, different convolutional techniques were developed for the minimization of the Top-5 error rate of object recognition and classification. Compared with the GoogleNet (Inception-v1) model, the Inception-v3 model has achieved improved performance. Notably, it has three parts: fundamental convolution block, enhanced Inception block, and classification block. <xref ref-type="fig" rid="fig-2">Fig. 2</xref> illustrates the structure of the Inception V3 model.</p>
<p>The fundamental convolution block, which alternates the convolution with max-pooling layers, is employed to extract the features. Then, the enhanced Inception block is developed using the Network-In-Network [<xref ref-type="bibr" rid="ref-25">25</xref>], where multi-scale convolution operations are performed simultaneously, and the convolution outcome of every branch undergoes concatenation. Because of the utilization of a secondary classifier, highly stable outcomes and better gradient convergence can be accomplished, and, at the same time, the vanishing gradient and overfitting problems are also alleviated. In Inception-v3, the one &#x00D7; one convolution kernel is commonly employed for reducing the feature channel count and speeding up training. Moreover, the decomposition of large convolutions into small ones also minimizes the number of parameters and computational complexity. Therefore, the Inception v3 model is applied to extract the features from the dermoscopic images.</p>
</sec>
<sec id="s3_4">
<label>3.4</label>
<title>Image Classification</title>
<p>At the final stage of image classification, the extracted feature vectors from the Inception v3 model are fed as input to the GBT model to define the presence of skin lesions, <italic>i.e</italic>., allocate proper class labels of the applied dermoscopic images. The GBT model is trained using XGBoost with the features obtained in the earlier process [<xref ref-type="bibr" rid="ref-26">26</xref>,<xref ref-type="bibr" rid="ref-27">27</xref>]. The GBT model is invariant to input scaling, and it learns higher-order interactions among the features. In addition, the GBT model undergoes training in an additive way. At every particular time step <inline-formula id="ieqn-36">
<!--<alternatives><inline-graphic xlink:href="ieqn-36.tif"/><tex-math id="tex-ieqn-36"><![$$CDATA[t$$]]></tex-math>--><mml:math id="mml-ieqn-36"><mml:mi>t</mml:mi></mml:math>
<!--</alternatives>--></inline-formula>, it grows another tree for minimizing the residuals of the present model. The objective function is defined using <xref ref-type="disp-formula" rid="eqn-15">Eq. (15)</xref>:</p>
<p><disp-formula id="eqn-15">
<label>(15)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-15.png"/><tex-math id="tex-eqn-15"><![$$CDATA[{{\rm {\cal L}}^{\left( t \right)}} = \mathop \sum \limits_{i = 1}^n l\left( {{y_i},\hat y_i^{t - 1} + {f_t}\left( {{x_i}} \right)} \right) + {\Omega }\left( {{f_t}} \right),$$]]></tex-math>--><mml:math id="mml-eqn-15" display="block"><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="script">L</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:msup></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>n</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mi>l</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>y</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:msubsup><mml:mrow><mml:mover><mml:mi>y</mml:mi><mml:mo stretchy="false">^</mml:mo></mml:mover></mml:mrow><mml:mi>i</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>t</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:mi mathvariant="normal">&#x03A9;</mml:mi></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>t</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>,</mml:mo></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <inline-formula id="ieqn-37">
<!--<alternatives><inline-graphic xlink:href="ieqn-37.tif"/><tex-math id="tex-ieqn-37"><![$$CDATA[l$$]]></tex-math>--><mml:math id="mml-ieqn-37"><mml:mi>l</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> represents a loss function that determines the variation among the label of the <italic>i</italic>-th sample <inline-formula id="ieqn-38">
<!--<alternatives><inline-graphic xlink:href="ieqn-38.tif"/><tex-math id="tex-ieqn-38"><![$$CDATA[{y_i}$$]]></tex-math>--><mml:math id="mml-ieqn-38"><mml:mrow><mml:msub><mml:mi>y</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> and the predictive process at the final step along with the current tree output; and <inline-formula id="ieqn-39">
<!--<alternatives><inline-graphic xlink:href="ieqn-39.tif"/><tex-math id="tex-ieqn-39"><![$$CDATA[{\Omega }\left( {{f_t}} \right)$$]]></tex-math>--><mml:math id="mml-ieqn-39"><mml:mrow><mml:mi mathvariant="normal">&#x03A9;</mml:mi></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>t</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> is the regularization term, which penalizes the complexity of the new tree. Finally, the GBT model generates appropriate class labels of all the applied test skin lesion images.</p>
</sec>
</sec>
<sec id="s4">
<label>4</label>
<title>Performance Validation</title>
<p>The performance validation of the presented model takes place on the ISIC dataset [<xref ref-type="bibr" rid="ref-28">28</xref>] comprising images under different classes such as Angioma, Nevus, Lentigo NOS, Solar Lentigo, Melanoma, Seborrheic Keratosis, and Basal Cell Carcinoma (BCC). The images in the ISIC dataset are of size 640 &#x00D7; 480 pixels. A few sample test images are illustrated in <xref ref-type="fig" rid="fig-3">Fig. 3</xref>.</p>
<fig id="fig-3">
<label>Figure 3</label>
<caption>
<title>Sample images</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_19117-fig-3.png"/>
</fig>
<p><xref ref-type="fig" rid="fig-3">Fig. 3</xref> illustrates the original dermoscopic images with their masked versions. <xref ref-type="fig" rid="fig-4">Fig. 4a</xref> shows the actual skin lesion image and the lesion region in each image is correctly masked in <xref ref-type="fig" rid="fig-4">Fig. 4b</xref>.</p>
<fig id="fig-4">
<label>Figure 4</label>
<caption>
<title>a) Original images b) Masked images</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_19117-fig-4.png"/>
</fig>
<p><xref ref-type="fig" rid="fig-5">Fig. 5</xref> depicts the confusion matrix obtained by the presented IMLT-DL model on the classification of skin lesions. The figure demonstrated that the IMLT-DL model has proficiently categorized 20 images under Angioma, 44 images under Nevus, 39 images under Lentigo NOS, 67 images under Solar Lentigo, 50 images under Melanoma, 52 images under Seborrheic Keratosis, and 37 images under BCC.</p>
<fig id="fig-5">
<label>Figure 5</label>
<caption>
<title>Confusion matrix for proposed IMLT-DL method</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_19117-fig-5.png"/>
</fig>
<p><xref ref-type="table" rid="table-1">Tab. 1</xref> and <xref ref-type="fig" rid="fig-6">Figs. 6</xref> and <xref ref-type="fig" rid="fig-7">7</xref> inspect the skin lesion classification results of the IMLT-DL model. The obtained experimental values demonstrated that the IMLT-DL model has appropriately classified the different skin lesion images. For instance, the IMLT-DL model has effectively classified the &#x2018;Angioma&#x2019; class with the sensitivity of 0.952, specificity of 1, the accuracy of 0.997, the precision of 1, and the G-measure of 0.976. Moreover, the IMLT-DL technique has effectively classified the &#x2018;Nevus&#x2019; class with a sensitivity of 0.957, the specificity of 0.996, the accuracy of 0.991, the precision of 0.978, and the G-measure of 0.967. Followed by, the IMLT-DL approach has efficiently classified the &#x2018;Lentigo NOS&#x2019; class with the sensitivity of 0.951, specificity of 1, acc. of 0.994, the precision of 1, and G-measure of 0.975. Furthermore, the IMLT-DL model has effectively classified the &#x2018;Solar Lentigo&#x2019; class with the sensitivity of 0.985, specificity of 0.996, the accuracy of 0.994, the precision of 0.985, and G-measure of 0.985. Along with that, the IMLT-DL manner has effectively classified the &#x2018;Melanoma&#x2019; class with the sensitivity of 0.980, specificity of 1, acc. of 0.997, the precision of 1, and G-measure of 0.990. Concurrently, the IMLT-DL method has effectually classified the &#x2018;Seborrheic Keratosis&#x2019; class with the sensitivity of 0.963, specificity of 0.989, acc. of 0.984, the precision of 0.946, and G-measure of 0.954. Simultaneously, the IMLT-DL technique has efficiently classified the &#x2018;BCC&#x2019; class with the sensitivity of 1, specificity of 0.986, acc. of 0.987, the precision of 0.902, and G-measure of 0.950.</p>
<table-wrap id="table-1">
<label>Table 1</label>
<caption>
<title>Performance evaluation of different classes on proposed IMLT-DL model</title>
</caption>
<table>
<colgroup>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr>
<th>Different classes</th>
<th>Sensitivity</th>
<th>Specificity</th>
<th>Accuracy</th>
<th>Precision</th>
<th>G-measure</th>
</tr>
</thead>
<tbody>
<tr>
<td>Angioma</td>
<td>0.952</td>
<td>1.000</td>
<td>0.997</td>
<td>1.000</td>
<td>0.976</td>
</tr>
<tr>
<td>Nevus</td>
<td>0.957</td>
<td>0.996</td>
<td>0.991</td>
<td>0.978</td>
<td>0.967</td>
</tr>
<tr>
<td>Lentigo NOS</td>
<td>0.951</td>
<td>1.000</td>
<td>0.994</td>
<td>1.000</td>
<td>0.975</td>
</tr>
<tr>
<td>Solar lentigo</td>
<td>0.985</td>
<td>0.996</td>
<td>0.994</td>
<td>0.985</td>
<td>0.985</td>
</tr>
<tr>
<td>Melanoma</td>
<td>0.980</td>
<td>1.000</td>
<td>0.997</td>
<td>1.000</td>
<td>0.990</td>
</tr>
<tr>
<td>Seborrheic keratosis</td>
<td>0.963</td>
<td>0.989</td>
<td>0.984</td>
<td>0.946</td>
<td>0.954</td>
</tr>
<tr>
<td>Basal cell carcinoma</td>
<td>1.000</td>
<td>0.986</td>
<td>0.987</td>
<td>0.902</td>
<td>0.950</td>
</tr>
<tr>
<td><bold>Average</bold></td>
<td><bold>0.970</bold></td>
<td><bold>0.995</bold></td>
<td><bold>0.992</bold></td>
<td><bold>0.973</bold></td>
<td><bold>0.971</bold></td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="fig-6">
<label>Figure 6</label>
<caption>
<title>Result analysis of IMLT-DL model with different measures</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_19117-fig-6.png"/>
</fig>
<fig id="fig-7">
<label>Figure 7</label>
<caption>
<title>Precision and G-measure analysis of IMLT-DL model</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_19117-fig-7.png"/>
</fig>
<p>A detailed comparative results analysis of the IMLT-DL with other existing methods occurs in <xref ref-type="fig" rid="fig-8">Fig. 8</xref> and <xref ref-type="table" rid="table-2">Tab. 2</xref> [<xref ref-type="bibr" rid="ref-29">29</xref>&#x2013;<xref ref-type="bibr" rid="ref-34">34</xref>]. From the results, it is revealed that the SVM model has showcased worse outcomes with the sensitivity of 0.732, specificity of 0.754, and acc. of 0.743. Next to that, the high-level features model has obtained a slightly increased sensitivity of 0.835, specificity of 0.813, and acc. of 0.811. On continuing with, the CNN model has attained certainly raised the sensitivity of 0.817, specificity of 0.829, and acc. of 0.824. Followed by, the ensemble classifier model has accomplished somewhat intermediate results with the sensitivity of 0.842, specificity of 0.826, and acc. of 0.84. Then, the deep CNN model has demonstrated manageable results with the sensitivity of 0.846, specificity of 0.832, and acc. of 0.843. Eventually, the DLN model has depicted even improved outcomes with the sensitivity of 0.820, specificity of 0.978, and accuracy of 0.932. Meanwhile, the CDNN model has offered a sensitivity of 0.825, specificity of 0.975, and acc. of 0.934, whereas even better sens. of 0.802, specificity of 0.985, and acc. of 0.934 has been demonstrated by the ResNets model. Moreover, the DCCN-GC model has resulted in a reasonable sensitivity of 0.908, specificity of 0.927, and acc. of 0.934. Furthermore, the DL-ANFC model has tried to show near-optimal outcomes with the sensitivity of 0.934, specificity of 0.987, and acc. of 0.979. However, the IMLT-DL model has outperformed the compared methods with the sensitivity of 0.97, specificity of 0.995, and acc. of 0.992.</p>
<table-wrap id="table-2">
<label>Table 2</label>
<caption>
<title>Performance of existing methods with proposed IMLT-DL model</title>
</caption>
<table>
<colgroup>
<col/>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr>
<th>Methods</th>
<th>Sensitivity</th>
<th>Specificity</th>
<th>Accuracy</th>
</tr>
</thead>
<tbody>
<tr>
<td>Proposed IMLT-DL</td>
<td>0.970</td>
<td>0.995</td>
<td>0.992</td>
</tr>
<tr>
<td>DL-ANFC</td>
<td>0.934</td>
<td>0.987</td>
<td>0.979</td>
</tr>
<tr>
<td>CNN</td>
<td>0.817</td>
<td>0.829</td>
<td>0.824</td>
</tr>
<tr>
<td>SVM</td>
<td>0.732</td>
<td>0.754</td>
<td>0.743</td>
</tr>
<tr>
<td>High-level features</td>
<td>0.835</td>
<td>0.813</td>
<td>0.811</td>
</tr>
<tr>
<td>Deep CNN</td>
<td>0.846</td>
<td>0.832</td>
<td>0.843</td>
</tr>
<tr>
<td>Ensemble classifier</td>
<td>0.842</td>
<td>0.826</td>
<td>0.840</td>
</tr>
<tr>
<td>CDNN</td>
<td>0.825</td>
<td>0.975</td>
<td>0.934</td>
</tr>
<tr>
<td>DLN</td>
<td>0.820</td>
<td>0.978</td>
<td>0.932</td>
</tr>
<tr>
<td>ResNets</td>
<td>0.802</td>
<td>0.985</td>
<td>0.934</td>
</tr>
<tr>
<td>DCCN-GC</td>
<td>0.908</td>
<td>0.927</td>
<td>0.934</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="fig-8">
<label>Figure 8</label>
<caption>
<title>Comparative analysis of IMLT-DL model with existing techniques</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_19117-fig-8.png"/>
</fig>
<p><xref ref-type="fig" rid="fig-9">Fig. 9</xref> demonstrates the ROC curve analysis of the proposed IMLT-DL model to classify skin lesion images. The figure showcased that the IMLT-DL model has obtained a maximum ROC of 98.765. Therefore, the IMLT-DL model has effectively classified the dermoscopic input images on the classification of skin lesions.</p>
<fig id="fig-9">
<label>Figure 9</label>
<caption>
<title>ROC analysis of proposed IMLT-DL model</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_19117-fig-9.png"/>
</fig>
<p>From the tables and figures mentioned above, it is apparent that the IMLT-DL model has accomplished effective skin lesion segmentation and classification outcome. Therefore, it can be an appropriate tool to segment and classify skin lesions using dermoscopic images in a real-time environment.</p>
</sec>
<sec id="s5">
<label>5</label>
<title>Conclusion</title>
<p>This study has developed a novel IMLT-DL model for effective skin lesion segmentation and a classification model using dermoscopic images. The IMLT-DL model diagnoses the skin lesion using different stages of operations such as pre-processing, segmentation, feature extraction, and classification. At the initial level, the presented IMLT-DL model integrates the Top hat filtering and inpainting technique for the pre-processing of the dermoscopic images. Then, multilevel thresholding-based segmentation is carried out to determine the infected skin lesion regions in the dermoscopic images. Inception v3 based feature extraction and GBT based classification processes are performed for effective skin lesion detection. The proposed IMLT-DL model is simulated using the ISIC dataset and the experimental outcomes are examined concerning several measures. The obtained simulation outcomes verified the superior performance of the IMLT-DL model by accomplishing a maximum accuracy of 0.992. In the future, the performance of the skin lesion segmentation process can be improved using advanced DL-based instantaneous segmentation techniques.</p>
</sec>
</body>
<back><fn-group>
<fn fn-type="other">
<p><bold>Funding Statement:</bold> The authors received no specific funding for this study.</p>
</fn>
<fn fn-type="conflict">
<p><bold>Conflicts of Interest:</bold> The authors declare that they have no conflicts of interest to report regarding the present study.</p>
</fn>
</fn-group>
<ref-list content-type="authoryear">
<title>References</title>
<ref id="ref-1">
<label>1</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>N.</given-names> 
<surname>Razmjooy</surname></string-name>, <string-name>
<given-names>M.</given-names> 
<surname>Ashourian</surname></string-name>, <string-name>
<given-names>M.</given-names> 
<surname>Karimifard</surname></string-name>, <string-name>
<given-names>V. V.</given-names> 
<surname>Estrela</surname></string-name>, <string-name>
<given-names>H. J.</given-names> 
<surname>Loschi</surname></string-name> <etal>et al.</etal>
</person-group><italic>,</italic> &#x201C;
<article-title>Computer-aided diagnosis of skin cancer: A review</article-title>,&#x201D; 
<source>Current Medical Imaging</source>, vol. 
<volume>16</volume>, no. 
<issue>7</issue>, pp. 
<fpage>781</fpage>&#x2013;
<lpage>793</lpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-2">
<label>2</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>O. T.</given-names> 
<surname>Jones</surname></string-name>, <string-name>
<given-names>C. K.</given-names> 
<surname>Ranmuthu</surname></string-name>, <string-name>
<given-names>P. N.</given-names> 
<surname>Hall</surname></string-name>, <string-name>
<given-names>G.</given-names> 
<surname>Funston</surname></string-name> and <string-name>
<given-names>F. M.</given-names> 
<surname>Walter</surname></string-name>
</person-group>, &#x201C;
<article-title>Recognising skin cancer in primary care</article-title>,&#x201D; 
<source>Advances in Therapy</source>, vol. 
<volume>37</volume>, no. 
<issue>1</issue>, pp. 
<fpage>603</fpage>&#x2013;
<lpage>616</lpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-3">
<label>3</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>J.</given-names> 
<surname>Feng</surname></string-name>, <string-name>
<given-names>N. G.</given-names> 
<surname>Isern</surname></string-name>, <string-name>
<given-names>S. D.</given-names> 
<surname>Burton</surname></string-name> and <string-name>
<given-names>J. Z.</given-names> 
<surname>Hu</surname></string-name>
</person-group>, &#x201C;
<article-title>Studies of secondary melanoma on C57BL/6J mouse liver using 1H NMR metabolomics</article-title>,&#x201D; 
<source>Metabolites</source>, vol. 
<volume>3</volume>, no. 
<issue>4</issue>, pp. 
<fpage>1011</fpage>&#x2013;
<lpage>1035</lpage>, 
<year>2013</year>.</mixed-citation>
</ref>
<ref id="ref-4">
<label>4</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>A.</given-names> 
<surname>Jemal</surname></string-name>, <string-name>
<given-names>R.</given-names> 
<surname>Siegel</surname></string-name>, <string-name>
<given-names>E.</given-names> 
<surname>Ward</surname></string-name>, <string-name>
<given-names>Y.</given-names> 
<surname>Hao</surname></string-name>, <string-name>
<given-names>J.</given-names> 
<surname>Xu</surname></string-name> <etal>et al.</etal>
</person-group><italic>,</italic> &#x201C;
<article-title>Cancer statistics</article-title>,&#x201D; 
<source>CA Cancer J. Clin.</source>, vol. 
<volume>69</volume>, no. 
<issue>1</issue>, pp. 
<fpage>7</fpage>&#x2013;
<lpage>34</lpage>, 
<year>2019</year>.</mixed-citation>
</ref>
<ref id="ref-5">
<label>5</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>T.</given-names> 
<surname>Tarver</surname></string-name> and <string-name>
<given-names>J.</given-names> 
<surname>Consum</surname></string-name>
</person-group>, &#x201C;
<article-title>Health internet 2012</article-title>,&#x201D; 
<source>American Cancer Society: Cancer Facts and Figures</source>, vol. 
<volume>16</volume>, no. 
<issue>1</issue>, pp. 
<fpage>366</fpage>&#x2013;
<lpage>367</lpage>, 
<year>2014</year>.</mixed-citation>
</ref>
<ref id="ref-6">
<label>6</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>R.</given-names> 
<surname>Siegel</surname></string-name>, <string-name>
<given-names>K.</given-names> 
<surname>Miller</surname></string-name> and <string-name>
<given-names>A.</given-names> 
<surname>Jemal</surname></string-name>
</person-group>, &#x201C;
<article-title>Cancer statistics, 2018</article-title>,&#x201D; 
<source>CA Cancer J. Clin.</source>, vol. 
<volume>68</volume>, no. 
<issue>1</issue>, pp. 
<fpage>7</fpage>&#x2013;
<lpage>30</lpage>, 
<year>2018</year>.</mixed-citation>
</ref>
<ref id="ref-7">
<label>7</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>G.</given-names> 
<surname>Pellacani</surname></string-name> and <string-name>
<given-names>S.</given-names> 
<surname>Seidenari</surname></string-name>
</person-group>, &#x201C;
<article-title>Comparison between morphological parameters in pigmented skin lesion images acquired using epiluminescence surface microscopy and polarized-light video microscopy</article-title>,&#x201D; 
<source>Clinical Dermatology</source>, vol. 
<volume>20</volume>, no. 
<issue>1</issue>, pp. 
<fpage>222</fpage>&#x2013;
<lpage>227</lpage>, 
<year>2002</year>.</mixed-citation>
</ref>
<ref id="ref-8">
<label>8</label><mixed-citation publication-type="conf-proc">
<person-group person-group-type="author"><string-name>
<given-names>A. R. A.</given-names> 
<surname>Ali</surname></string-name> and <string-name>
<given-names>T. M.</given-names> 
<surname>Deserno</surname></string-name>
</person-group>, &#x201C;
<article-title>A systematic review of automated melanoma detection in dermatoscopic images and its ground truth data</article-title>,&#x201D; in <conf-name>Proc. Medical Imaging 2012: Image Perception, Observer Performance, and Technology Assessment</conf-name>, 
<publisher-loc>Bellingham, WA, USA</publisher-loc>, 
<publisher-name>International Society for Optics and Photonics</publisher-name>, pp. 
<fpage>8318</fpage>, 
<year>2012</year>. </mixed-citation>
</ref>
<ref id="ref-9">
<label>9</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>E. S.</given-names> 
<surname>Madhan</surname></string-name>, <string-name>
<given-names>S.</given-names> 
<surname>Neelakandan</surname></string-name> and <string-name>
<given-names>R.</given-names> 
<surname>Annamalai</surname></string-name>
</person-group>, &#x201C;
<article-title>A novel approach for vehicle type classification and speed prediction using deep learning</article-title>,&#x201D; 
<source>Journal of Computational and Theoretical Nano science</source>, vol. 
<volume>17</volume>, no. 
<issue>5</issue>, pp. 
<fpage>2237</fpage>&#x2013;
<lpage>2242</lpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-10">
<label>10</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>S. M.</given-names> 
<surname>Jaisakthi</surname></string-name>, <string-name>
<given-names>P.</given-names> 
<surname>Mirunalini</surname></string-name> and <string-name>
<given-names>C.</given-names> 
<surname>Aravindan</surname></string-name>
</person-group>, &#x201C;
<article-title>Automated skin lesion segmentation of dermoscopic images using grabcut and kmeans algorithms</article-title>,&#x201D; 
<source>IET Comput. Vis.</source>, vol. 
<volume>12</volume>, no. 
<issue>1</issue>, pp. 
<fpage>1088</fpage>&#x2013;
<lpage>1095</lpage>, 
<year>2018</year>.</mixed-citation>
</ref>
<ref id="ref-11">
<label>11</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>P.</given-names> 
<surname>Agrawal</surname></string-name>, <string-name>
<given-names>D.</given-names> 
<surname>Chaudhary</surname></string-name>, <string-name>
<given-names>V.</given-names> 
<surname>Madaan</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Zabrovskiy</surname></string-name>, <string-name>
<given-names>R.</given-names> 
<surname>Prodan</surname></string-name> <etal>et al.</etal>
</person-group><italic>,</italic> &#x201C;
<article-title>Automated bank cheque verification using image processing and deep learning methods</article-title>,&#x201D; 
<source>Multimedia Tools and Applications</source>, vol. 
<volume>80</volume>, no. 
<issue>1</issue>, pp. 
<fpage>5319</fpage>&#x2013;
<lpage>5350</lpage>, 
<year>2021</year>.</mixed-citation>
</ref>
<ref id="ref-12">
<label>12</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>V.</given-names> 
<surname>Madaan</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Roy</surname></string-name>, <string-name>
<given-names>C.</given-names> 
<surname>Gupta</surname></string-name>, <string-name>
<given-names>P.</given-names> 
<surname>Agrawal</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Sharma</surname></string-name> <etal>et al.</etal>
</person-group><italic>,</italic> &#x201C;
<article-title>XCOVNet: Chest X-ray image classification for covid-19 early detection using convolutional neural networks</article-title>,&#x201D; 
<source>New Gener. Comput.</source>, vol. 
<volume>39</volume>, no. 
<issue>2</issue>, pp. 
<fpage>1</fpage>&#x2013;
<lpage>15</lpage>, 
<year>2021</year>.</mixed-citation>
</ref>
<ref id="ref-13">
<label>13</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>M.</given-names> 
<surname>Aljanabi</surname></string-name>, <string-name>
<given-names>Y. E.</given-names> 
<surname>&#x00D6;zok</surname></string-name>, <string-name>
<given-names>J.</given-names> 
<surname>Rahebi</surname></string-name> and <string-name>
<given-names>A. S.</given-names> 
<surname>Abdullah</surname></string-name>
</person-group>, &#x201C;
<article-title>Skin lesion segmentation method for dermoscopy images using artificial bee colony algorithm</article-title>,&#x201D; 
<source>Symmetry</source>, vol. 
<volume>10</volume>, no. 
<issue>1</issue>, pp. 
<fpage>347</fpage>&#x2013;
<lpage>354</lpage>, 
<year>2018</year>.</mixed-citation>
</ref>
<ref id="ref-14">
<label>14</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>A.</given-names> 
<surname>Pennisi</surname></string-name>, <string-name>
<given-names>D. D.</given-names> 
<surname>Bloisi</surname></string-name>, <string-name>
<given-names>D.</given-names> 
<surname>Nardi</surname></string-name>, <string-name>
<given-names>A. R.</given-names> 
<surname>Giampetruzzi</surname></string-name> and <string-name>
<given-names>C.</given-names> 
<surname>Mondino</surname></string-name>
</person-group>, &#x201C;
<article-title>Skin lesion image segmentation using delaunay triangulation for melanoma detection</article-title>,&#x201D; 
<source>Computerized Medical Imaging and Graphics</source>, vol. 
<volume>52</volume>, no. 
<issue>1</issue>, pp. 
<fpage>89</fpage>&#x2013;
<lpage>103</lpage>, 
<year>2016</year>.</mixed-citation>
</ref>
<ref id="ref-15">
<label>15</label><mixed-citation publication-type="conf-proc">
<person-group person-group-type="author"><string-name>
<given-names>L.</given-names> 
<surname>Bi</surname></string-name>, <string-name>
<given-names>J.</given-names> 
<surname>Kim</surname></string-name>, <string-name>
<given-names>E.</given-names> 
<surname>Ahn</surname></string-name>, <string-name>
<given-names>D.</given-names> 
<surname>Feng</surname></string-name> and <string-name>
<given-names>M.</given-names> 
<surname>Fulham</surname></string-name>
</person-group>, &#x201C;
<article-title>Automated skin lesion segmentation via image-wise supervised learning and multi-scale superpixel based cellular automata</article-title>,&#x201D; in <conf-name>Proc. of the Int. Symp. on Biomedical Imaging</conf-name>, 
<publisher-loc>Prague, Czech Republic</publisher-loc>, pp. 
<fpage>1059</fpage>&#x2013;
<lpage>1062</lpage>, 
<year>2016</year>. </mixed-citation>
</ref>
<ref id="ref-16">
<label>16</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>L.</given-names> 
<surname>Bi</surname></string-name>, <string-name>
<given-names>J.</given-names> 
<surname>Kim</surname></string-name>, <string-name>
<given-names>E.</given-names> 
<surname>Ahn</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Kumar</surname></string-name> and <string-name>
<given-names>M.</given-names> 
<surname>Fulhan</surname></string-name>
</person-group>, &#x201C;
<article-title>Dermoscopic image segmentation via multi-stage fully convolutional networks</article-title>,&#x201D; 
<source>IEEE Transactions on Biomedical Engineering</source>, vol. 
<volume>64</volume>, no. 
<issue>1</issue>, pp. 
<fpage>2065</fpage>&#x2013;
<lpage>2074</lpage>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-17">
<label>17</label><mixed-citation publication-type="other">
<person-group person-group-type="author"><string-name>
<given-names>Y.</given-names> 
<surname>Yuan</surname></string-name>
</person-group>, &#x201C;
<article-title>Automatic skin lesion segmentation with fully convolutional-deconvolutional networks</article-title>,&#x201D; 
<comment>arXiv preprint, arXiv:1703,05165</comment>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-18">
<label>18</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>M.</given-names> 
<surname>Berseth</surname></string-name>
</person-group>, &#x201C;
<article-title>Skin lesion analysis towards melanoma detection</article-title>,&#x201D; 
<source>International Skin Imaging Collaboration</source>, vol. 
<volume>18</volume>, no. 
<issue>2</issue>, pp. 
<fpage>13</fpage>&#x2013;
<lpage>18</lpage>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-19">
<label>19</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>D.</given-names> 
<surname>Paulraj</surname></string-name>
</person-group>, &#x201C;
<article-title>An automated exploring and learning model for data prediction using balanced CA-SVM</article-title>,&#x201D; 
<source>Journal of Ambient Intelligence and Humanized Computing</source>, vol. 
<volume>12</volume>, pp. 
<fpage>1</fpage>&#x2013;
<lpage>12</lpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-20">
<label>20</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>M. Y.</given-names> 
<surname>Sikkandar</surname></string-name>, <string-name>
<given-names>B. A.</given-names> 
<surname>Alrasheadi</surname></string-name>, <string-name>
<given-names>N. B.</given-names> 
<surname>Prakash</surname></string-name>, <string-name>
<given-names>G. R.</given-names> 
<surname>Hemalakshmi</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Mohanarathinam</surname></string-name> <etal>et al.</etal>
</person-group><italic>,</italic> &#x201C;
<article-title>Deep learning based an automated skin lesion segmentation and intelligent classification model</article-title>,&#x201D; 
<source>Journal of Ambient Intelligence and Humanized Computing</source>, vol. 
<volume>12</volume>, pp. 
<fpage>1</fpage>&#x2013;
<lpage>11</lpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-21">
<label>21</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>J. N.</given-names> 
<surname>Kapur</surname></string-name>, <string-name>
<given-names>P. K.</given-names> 
<surname>Sahoo</surname></string-name> and <string-name>
<given-names>A. K.</given-names> 
<surname>Wong</surname></string-name>
</person-group>, &#x201C;
<article-title>A new method for gray-level picture thresholding using the entropy of the histogram</article-title>,&#x201D; 
<source>Computer Vision, Graphics, and Image Processing</source>, vol. 
<volume>29</volume>, no. 
<issue>1</issue>, pp. 
<fpage>273</fpage>&#x2013;
<lpage>285</lpage>, 
<year>1985</year>.</mixed-citation>
</ref>
<ref id="ref-22">
<label>22</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>E. H.</given-names> 
<surname>Houssein</surname></string-name>, <string-name>
<given-names>B. E. D.</given-names> 
<surname>Helmy</surname></string-name>, <string-name>
<given-names>D.</given-names> 
<surname>Oliva</surname></string-name>, <string-name>
<given-names>A. A.</given-names> 
<surname>Elngar</surname></string-name> and <string-name>
<given-names>H.</given-names> 
<surname>Shaban</surname></string-name>
</person-group>, &#x201C;
<article-title>A novel black widow optimization algorithm for multilevel thresholding image segmentation</article-title>,&#x201D; 
<source>Expert Systems with Applications</source>, vol. 
<volume>167</volume>, no. 
<issue>1</issue>, pp. 
<fpage>114</fpage>&#x2013;
<lpage>159</lpage>, 
<year>2021</year>.</mixed-citation>
</ref>
<ref id="ref-23">
<label>23</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>Z. M.</given-names> 
<surname>Gao</surname></string-name>, <string-name>
<given-names>J.</given-names> 
<surname>Zhao</surname></string-name>, <string-name>
<given-names>S. R.</given-names> 
<surname>Li</surname></string-name> and <string-name>
<given-names>Y. R.</given-names> 
<surname>Hu</surname></string-name>
</person-group>, &#x201C;
<article-title>The improved mayfly optimization algorithm</article-title>,&#x201D; 
<source>Journal of Physics: IOP Conference Series</source>, vol. 
<volume>1684</volume>, no. 
<issue>1</issue>, pp. 
<fpage>12077</fpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-24">
<label>24</label><mixed-citation publication-type="conf-proc">
<person-group person-group-type="author"><string-name>
<given-names>A.</given-names> 
<surname>Krizhevsky</surname></string-name>, <string-name>
<given-names>I.</given-names> 
<surname>Sutskever</surname></string-name> and <string-name>
<given-names>G. E.</given-names> 
<surname>Hinton</surname></string-name>
</person-group>, &#x201C;
<article-title>ImageNet classification with deep convolutional neural networks</article-title>,&#x201D; in <conf-name>Proc. of the 25th Int. Conf. on Neural Information Processing Systems</conf-name>, 
<publisher-loc>Lake Tahoe, Nevada, USA</publisher-loc>, pp. 
<fpage>1097</fpage>&#x2013;
<lpage>1105</lpage>, 
<year>2012</year>. </mixed-citation>
</ref>
<ref id="ref-25">
<label>25</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>C.</given-names> 
<surname>Lin</surname></string-name>, <string-name>
<given-names>L.</given-names> 
<surname>Li</surname></string-name>, <string-name>
<given-names>W.</given-names> 
<surname>Luo</surname></string-name>, <string-name>
<given-names>K. C.</given-names> 
<surname>Wang</surname></string-name> and <string-name>
<given-names>J.</given-names> 
<surname>Guo</surname></string-name>
</person-group>, &#x201C;
<article-title>Transfer learning based traffic sign recognition using inception-v3 model</article-title>,&#x201D; 
<source>Periodica Polytechnica Transportation Engineering</source>, vol. 
<volume>47</volume>, no. 
<issue>3</issue>, pp. 
<fpage>242</fpage>&#x2013;
<lpage>250</lpage>, 
<year>2019</year>.</mixed-citation>
</ref>
<ref id="ref-26">
<label>26</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>Y.</given-names> 
<surname>Liu</surname></string-name>, <string-name>
<given-names>Y.</given-names> 
<surname>Gu</surname></string-name>, <string-name>
<given-names>J. C.</given-names> 
<surname>Nguyen</surname></string-name>, <string-name>
<given-names>H.</given-names> 
<surname>Li</surname></string-name>, <string-name>
<given-names>J.</given-names> 
<surname>Zhang</surname></string-name> <etal>et al.</etal>
</person-group> &#x201C;
<article-title>Symptom severity classification with gradient tree boosting</article-title>,&#x201D; 
<source>Journal of Biomedical Informatics</source>, vol. 
<volume>75</volume>, pp. 
<fpage>105</fpage>&#x2013;
<lpage>111</lpage>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-27">
<label>27</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>S.</given-names> 
<surname>Neelakandan</surname></string-name> and <string-name>
<given-names>D.</given-names> 
<surname>Paulraj</surname></string-name>
</person-group>, &#x201C;
<article-title>A gradient boosted decision tree-based sentiment classification of twitter data</article-title>,&#x201D; 
<source>International Journal of Wavelets, Multiresolution and Information Processing</source>, vol. 
<volume>18</volume>, no. 
<issue>4</issue>, pp. 
<fpage>1</fpage>&#x2013;
<lpage>21</lpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-28">
<label>28</label><mixed-citation publication-type="other">
<person-group person-group-type="author"><string-name>
<given-names>S.</given-names> 
<surname>Divyabharathi</surname></string-name>
</person-group>, 
<comment>&#x201C;Large scale optimization to minimize network traffic using MapReduce in big data applications,&#x201D; <italic>International Conference on Computation of Power Energy Information and Communication</italic>, pp. 193&#x2013;199, 2016</comment>.</mixed-citation>
</ref>
<ref id="ref-29">
<label>29</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>D.</given-names> 
<surname>Po&#x0142;ap</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Winnicka</surname></string-name>, <string-name>
<given-names>K.</given-names> 
<surname>Serwata</surname></string-name>, <string-name>
<given-names>K.</given-names> 
<surname>K&#x0119;sik</surname></string-name> and <string-name>
<given-names>M.</given-names> 
<surname>Wo&#x017A;niak</surname></string-name>
</person-group>, &#x201C;
<article-title>An intelligent system for monitoring skin diseases</article-title>,&#x201D; 
<source>Sensors</source>, vol. 
<volume>18</volume>, no. 
<issue>8</issue>, pp. 
<fpage>25</fpage>&#x2013;
<lpage>52</lpage>, 
<year>2018</year>.</mixed-citation>
</ref>
<ref id="ref-30">
<label>30</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>S.</given-names> 
<surname>Satpathy</surname></string-name>, <string-name>
<given-names>P.</given-names> 
<surname>Mohan</surname></string-name>, <string-name>
<given-names>S.</given-names> 
<surname>Das</surname></string-name> and <string-name>
<given-names>S.</given-names> 
<surname>Debbarma</surname></string-name>
</person-group>, &#x201C;
<article-title>A new healthcare diagnosis system using an IoT-based fuzzy classifier with FPGA</article-title>,&#x201D; 
<source>Journal of Supercomputing</source>, vol. 
<volume>76</volume>, no. 
<issue>8</issue>, pp. 
<fpage>5849</fpage>&#x2013;
<lpage>5861</lpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-31">
<label>31</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>H. M.</given-names> 
<surname>&#x00DC;nver</surname></string-name> and <string-name>
<given-names>E.</given-names> 
<surname>Ayan</surname></string-name>
</person-group>, &#x201C;
<article-title>Skin lesion segmentation in dermoscopic images with combination of YOLO and GrabCut algorithm</article-title>,&#x201D; 
<source>Diagnostics</source>, vol. 
<volume>9</volume>, no. 
<issue>3</issue>, pp. 
<fpage>72</fpage>, 
<year>2019</year>.</mixed-citation>
</ref>
<ref id="ref-32">
<label>32</label><mixed-citation publication-type="other">
<person-group person-group-type="author"><string-name>
<given-names>Y.</given-names> 
<surname>Yuan</surname></string-name>, <string-name>
<given-names>M.</given-names> 
<surname>Chao</surname></string-name> and <string-name>
<given-names>Y. C.</given-names> 
<surname>Lo</surname></string-name>
</person-group>, &#x201C;
<article-title>Automatic skin lesion segmentation with fully convolutional-deconvolutional networks</article-title>,&#x201D; 
<comment>arXiv preprint, arXiv:1703.05165</comment>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-33">
<label>33</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>S.</given-names> 
<surname>Satpathy</surname></string-name>, <string-name>
<given-names>M.</given-names> 
<surname>Prakash</surname></string-name>, <string-name>
<given-names>S.</given-names> 
<surname>Debbarma</surname></string-name>, <string-name>
<given-names>A. S.</given-names> 
<surname>Sengupta</surname></string-name> and <string-name>
<given-names>B. K. D.</given-names> 
<surname>Bhattacaryya</surname></string-name>
</person-group>, &#x201C;
<article-title>Design a FPGA, fuzzy based, insolent method for prediction of multi-diseases in rural area</article-title>,&#x201D; 
<source>Journal of Intelligent &#x0026; Fuzzy Systems</source>, vol. 
<volume>37</volume>, no. 
<issue>5</issue>, pp. 
<fpage>7039</fpage>&#x2013;
<lpage>7046</lpage>, 
<year>2019</year>.</mixed-citation>
</ref>
<ref id="ref-34">
<label>34</label><mixed-citation publication-type="other">
<person-group person-group-type="author"><string-name>
<given-names>L.</given-names> 
<surname>Bi</surname></string-name>, <string-name>
<given-names>J.</given-names> 
<surname>Kim</surname></string-name>, <string-name>
<given-names>E.</given-names> 
<surname>Ahn</surname></string-name> and <string-name>
<given-names>D.</given-names> 
<surname>Feng</surname></string-name>
</person-group>, &#x201C;
<article-title>Automatic skin lesion analysis using large-scale dermoscopy images and deep residual networks</article-title>,&#x201D; 
<comment>arXiv preprint, arXiv:1703.04197</comment>, 
<year>2017</year>.</mixed-citation>
</ref>
</ref-list>
</back>
</article>