<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "http://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="1.1">
<front>
<journal-meta>
<journal-id journal-id-type="pmc">CSSE</journal-id>
<journal-id journal-id-type="nlm-ta">CSSE</journal-id>
<journal-id journal-id-type="publisher-id">CSSE</journal-id>
<journal-title-group>
<journal-title>Computer Systems Science &#x0026; Engineering</journal-title>
</journal-title-group>
<issn pub-type="ppub">0267-6192</issn>
<publisher>
<publisher-name>Tech Science Press</publisher-name>
<publisher-loc>USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">24674</article-id>
<article-id pub-id-type="doi">10.32604/csse.2023.024674</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Article</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Brain Tumor Diagnosis Using Sparrow Search Algorithm Based Deep Learning Model</article-title><alt-title alt-title-type="left-running-head">Brain Tumor Diagnosis Using Sparrow Search Algorithm Based Deep Learning Model</alt-title><alt-title alt-title-type="right-running-head">Brain Tumor Diagnosis Using Sparrow Search Algorithm Based Deep Learning Model</alt-title>
</title-group>
<contrib-group content-type="authors">
<contrib id="author-1" contrib-type="author">
<name name-style="western"><surname>Ignisha Rajathi</surname><given-names>G.</given-names></name>
<xref ref-type="aff" rid="aff-1">1</xref>
</contrib>
<contrib id="author-2" contrib-type="author">
<name name-style="western"><surname>Ramesh Kumar</surname><given-names>R.</given-names></name>
<xref ref-type="aff" rid="aff-2">2</xref>
</contrib>
<contrib id="author-3" contrib-type="author">
<name name-style="western"><surname>Ravikumar</surname><given-names>D.</given-names></name>
<xref ref-type="aff" rid="aff-3">3</xref>
</contrib>
<contrib id="author-4" contrib-type="author">
<name name-style="western"><surname>Joel</surname><given-names>T.</given-names></name>
<xref ref-type="aff" rid="aff-4">4</xref>
</contrib>
<contrib id="author-5" contrib-type="author">
<name name-style="western"><surname>Kadry</surname><given-names>Seifedine</given-names></name>
<xref ref-type="aff" rid="aff-4">4</xref>
<xref ref-type="aff" rid="aff-5">5</xref>
</contrib>
<contrib id="author-6" contrib-type="author">
<name name-style="western"><surname>Jeong</surname><given-names>Chang-Won</given-names></name>
<xref ref-type="aff" rid="aff-6">6</xref>
</contrib>
<contrib id="author-7" contrib-type="author" corresp="yes">
<name name-style="western"><surname>Nam</surname><given-names>Yunyoung</given-names></name>
<xref ref-type="aff" rid="aff-7">7</xref><email>ynam@sch.ac.kr</email>
</contrib>
<aff id="aff-1"><label>1</label><institution>Department of Computer Science and Business Systems, Sri Krishna College of Engineering and Technology</institution>, <addr-line>Coimbatore, 641008</addr-line>, <country>India</country></aff>
<aff id="aff-2"><label>2</label><institution>Department of Information Technology, Sri Krishna College of Technology</institution>, <addr-line>Coimbatore, 641008</addr-line>, <country>India</country></aff>
<aff id="aff-3"><label>3</label><institution>Department of Electronics and Communication Engineering, Kings Engineering College</institution>, <addr-line>Chennai, 602117</addr-line>, <country>India</country></aff>
<aff id="aff-4"><label>4</label><institution>Department of Electronics and Communication Engineering, R.M.K. Engineering College</institution>, <addr-line>Chennai, 601206</addr-line>, <country>India</country></aff>
<aff id="aff-5"><label>5</label><institution>Department of Applied Data Science, Noroff University College</institution>, <addr-line>Kristiansand</addr-line>, <country>Norway</country></aff>
<aff id="aff-6"><label>6</label><institution>Medical Convergence Research Center, Wonkwang University</institution>, <addr-line>Iksan, Korea</addr-line></aff>
<aff id="aff-7"><label>7</label><institution>Department of Computer Science and Engineering, Soonchunhyang University</institution>, <addr-line>Asan, Korea</addr-line></aff>
</contrib-group><author-notes><corresp id="cor1"><label>&#x002A;</label>Corresponding Author: Yunyoung Nam. Email: <email>ynam@sch.ac.kr</email></corresp></author-notes>
<pub-date pub-type="epub" date-type="pub" iso-8601-date="2022-06-07"><day>07</day>
<month>06</month>
<year>2022</year></pub-date>
<volume>44</volume>
<issue>2</issue>
<fpage>1793</fpage>
<lpage>1806</lpage>
<history>
<date date-type="received"><day>27</day><month>10</month><year>2021</year></date>
<date date-type="accepted"><day>31</day><month>12</month><year>2021</year></date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2023 Ignisha Rajathi et al.</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Ignisha Rajathi et al.</copyright-holder>
<license xlink:href="https://creativecommons.org/licenses/by/4.0/">
<license-p>This work is licensed under a <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="TSP_CSSE_24674.pdf"></self-uri>
<abstract>
<p>Recently, Internet of Medical Things (IoMT) has gained considerable attention to provide improved healthcare services to patients. Since earlier diagnosis of brain tumor (BT) using medical imaging becomes an essential task, automated IoMT and cloud enabled BT diagnosis model can be devised using recent deep learning models. With this motivation, this paper introduces a novel IoMT and cloud enabled BT diagnosis model, named IoMTC-HDBT. The IoMTC-HDBT model comprises the data acquisition process by the use of IoMT devices which capture the magnetic resonance imaging (MRI) brain images and transmit them to the cloud server. Besides, adaptive window filtering (AWF) based image preprocessing is used to remove noise. In addition, the cloud server executes the disease diagnosis model which includes the sparrow search algorithm (SSA) with GoogleNet (SSA-GN) model. The IoMTC-HDBT model applies functional link neural network (FLNN), which has the ability to detect and classify the MRI brain images as normal or abnormal. It is found useful for generating reports instantly for patients located in remote areas. The validation of the IoMTC-HDBT model takes place against BRATS2015 Challenge dataset and the experimental analysis is carried out in terms of sensitivity, accuracy, and specificity. The experimentation outcome pointed out the betterment of the proposed model with the accuracy of 0.984.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>Internet of medical things</kwd>
<kwd>healthcare</kwd>
<kwd>brain tumor</kwd>
<kwd>disease classification</kwd>
<kwd>deep learning</kwd>
<kwd>metaheuristics</kwd>
</kwd-group>
</article-meta>
</front>
<body>
<sec id="s1">
<label>1</label>
<title>Introduction</title>
<p>With the extensive use of equipment in the limited security provisioning, medical sector, and lower power computing device, several security threats might occur. The more dangerous security threats in the healthcare sector are confronted by brain tumors since the survival rate is around 34.9% [<xref ref-type="bibr" rid="ref-1">1</xref>]. With the management of Internet of things (IoT) devices [<xref ref-type="bibr" rid="ref-2">2</xref>], especially Internet of medical things (IoMT), patients&#x2019; information could be stolen by the attackers using botnet [<xref ref-type="bibr" rid="ref-3">3</xref>]. Thus, the privacy of IoMT device is indispensable [<xref ref-type="bibr" rid="ref-4">4</xref>,<xref ref-type="bibr" rid="ref-5">5</xref>]. The brain is considered as the primary organ, taking responsibility for emotions, memory, motor skills, vision, respiration, reactions, and many other regulating functions of the human body. This function is seriously damaged if any tumor begins to grow inside the brain. There are two main types of brain tumor: a metastatic brain tumor, which originates in other parts of the body and spreads to the brain, and a tumor arising from the growth of brain tissue itself. The irregular growth of cells in a brain is called a brain tumor that results in secondary (malignant)/primary (benign) tumor. The secondary tumor is cancerous and might extend to another part of the brain and body whereas the primary tumor is non-cancerous and doesn&#x2019;t spread from one portion of a brain to another. During the development of a malignant/benign tumor, the skull is forced to expand and, consequently, damage the brain, which is very dangerous. Therefore, the accurate prediction of brain tumors at an earlier stage is highly significant for the treatment, diagnosis, and prognosis, i.e., only feasible by employing secured methods or algorithms on IoMT device. 
Conventional approaches used to detect brain tumors are biopsy and the investigation of computed tomography (CT) scan images by humans.</p>
<p>With the growth of medical imaging techniques (CT scan, magnetic resonance imaging (MRI), and so on.) as well as the advances in digital image processing, computer-aided diagnosis (CAD) of tumors and tissues has improved [<xref ref-type="bibr" rid="ref-6">6</xref>]. During the previous decades, various methods have been projected for CAD systems for brain tumors, like Support Vector Machine (SVM) [<xref ref-type="bibr" rid="ref-7">7</xref>], fused vectors, deep networks, and transfer learning. With the current trends in deep networks, Convolutional Neural Network (CNN) method was extensively employed for distinct CAD schemes [<xref ref-type="bibr" rid="ref-8">8</xref>]. CNN is a sequence of multiple layers in which all the layers extract features and transform a complicated input to an activation format, by means of partial differential function. CNN framework consists of fully connected, convolution, and pooling layers. While the convolutional layers gradually extract features, pooling layer down sample with the spatial domain, and the fully connected layer categorizes [<xref ref-type="bibr" rid="ref-9">9</xref>]. A vanishing gradient problem might increase while smaller number appears when computing gradients. The CNN layer consists of output, input, dropout, and network in network layers [<xref ref-type="bibr" rid="ref-10">10</xref>].</p>
<p>This paper introduces a novel IoMT and cloud enabled BT diagnosis model, named IoMTC-HDBT. The IoMTC-HDBT model comprises the data acquisition process by the use of IoMT devices which captures the MRI brain images and transmit them to the cloud server. Besides, adaptive window filtering (AWF) based image preprocessing is used to remove noise. In addition, the cloud server executes the disease diagnosis model based on sparrow search algorithm (SSA) with GoogleNet (SSA-GN). The IoMTC-HDBT model applies functional link neural network (FLNN). The performance evaluation of the IoMTC-HDBT model is carried out using the BRATS2015 Challenge dataset and the results are inspected under varying measures.</p>
</sec>
<sec id="s2">
<label>2</label>
<title>Related Works</title>
<p>Reference [<xref ref-type="bibr" rid="ref-11">11</xref>] includes the application of a deep convolutional neural network (DCNN) to diagnose brain tumors from magnetic resonance (MR) images. The datasets contain 253 brain MR images of which 155 images are stated to have tumors. This method could distinguish the MR images using tumors with 96% of total accuracy. In [<xref ref-type="bibr" rid="ref-12">12</xref>], a CNN based complex network (CNNBCN) using an adapted activation function for the MRI classification of brain tumors has been proposed. The network framework isn&#x2019;t designed and optimized automatically; however, it is created by arbitrarily created graph algorithm. This randomly generated graph is mapped to a computable neural network (NN) through a network generator.</p>
<p>Reference [<xref ref-type="bibr" rid="ref-13">13</xref>] presented a novel technique for forecasting the probability of being malignant brain tumors based on human physical symptoms with IoT. Another advanced technique to detect the malignant brain tumor based MRI image uses X-ray light for detecting malignant tumors, i.e., harmful for health and more expensive. Alternatively, the presented method is developed in a portable manner for monitoring the real-world blood pressure, body temperature, heart rate as well as best suited to predict the probability of being malignant tumors than present technologies. In [<xref ref-type="bibr" rid="ref-14">14</xref>], a Rectangular Microstrip Patch Antenna was developed for Microwave Imaging (MI) using a frequency range of 1.5 to 3 GHz at a resonant frequency of 2.3 GHz (5G-Band) in the CST Studio Suite Software for identifying brain tumors. Reference [<xref ref-type="bibr" rid="ref-15">15</xref>] present a methodology of multilevel features extraction and concatenation for earlier diagnoses of brain tumors. The 2 pretrained deep learning (DL) methods that are DensNet201 and Inception-v3 make this method effective. By using this method, 2 distinct scenarios of brain tumor diagnosis and its classification have been estimated. Initially, the feature from distinct Inception models have been extracted from pretrained Inception-v3 module and concatenated this feature for brain tumor classification. Next, this feature is passed onto softmax classifiers for classifying the brain tumor. Then, pretrained DensNet201 has been employed for extracting features from different DensNet blocks. Subsequently, this feature was concatenated and passed onto softmax classifier for classifying the brain tumor.</p>
<p>Reference [<xref ref-type="bibr" rid="ref-16">16</xref>] introduce a Hybrid Two-Track U-Net (HTTU-Net) framework for brain tumor segmentation. This framework leverages the usage of Leaky Rectified linear unit (Relu) activation method. It comprises 2 tracks; all have a distinct amount of layers and use a dissimilar kernel size. Next, combine these 2 tracks for generating the last segmentation. They utilize the generalized Dice (GDL), loss functions, and focal loss, for addressing the problems of class imbalance. In [<xref ref-type="bibr" rid="ref-17">17</xref>], a DL method-based CNN is presented for classifying various brain tumor kinds with 2 open-source databases. The previous one categorizes tumors into (pituitary, meningioma, glioma, and tumor). Reference [<xref ref-type="bibr" rid="ref-18">18</xref>] adapt deep separable convolution layer for replacing the standard framework in the U-Net for distinguishing the appearance and spatial correlations of the mapped convolution network. They present residual skip connection to the ACU-Net for heightening the propagation capacity of the feature and accelerating the convergence speed of networks for realizing the capture of deep abnormal regions.</p>
</sec>
<sec id="s3">
<label>3</label>
<title>The Proposed Model</title>
<p>In this study, a novel IoMTC-HDBT technique has been presented to detect and classify the BT using medical images. The proposed IoMTC-HDBT technique encompasses AWF based image filtering, GoogleNet based feature extraction, SSA based hyperparameter tuning, and FLNN based classification. The IoMTC-HDBT technique has the ability to detect and classify the MRI brain images as normal or abnormal.</p>
<sec id="s3_1">
<label>3.1</label>
<title>Image Acquisition</title>
<p>For gathering the medical data of the patients, smart IoMT devices are utilized. The medicinal IoMT devices capture the images and transmit them to the cloud server. Then, the cloud server receives the medical images and executes the disease diagnosis model, involving different subprocesses as discussed in the following.</p>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Image Pre-processing</title>
<p>At this stage, the AWF technique is applied to remove the noise that exists in the medical images. In this study, an AWF technique was implemented for filtering the impulse noise [<xref ref-type="bibr" rid="ref-19">19</xref>]. Based on the ratio of pixels affected by impulse noise in distinct regions, the dimensions of the filter window are altered.</p>
<p>Assume the initial dimension of the filter window is <inline-formula id="ieqn-1">
<mml:math id="mml-ieqn-1"><mml:mi>n</mml:mi><mml:mo>&#x00D7;</mml:mo><mml:mi>n</mml:mi></mml:math>
</inline-formula> (<inline-formula id="ieqn-2">
<mml:math id="mml-ieqn-2"><mml:mi>n</mml:mi></mml:math>
</inline-formula> is odd number), the amount of noise pixel from the window is <inline-formula id="ieqn-3">
<mml:math id="mml-ieqn-3"><mml:mi>m</mml:mi></mml:math>
</inline-formula>, and the ratio that is affected by impulse noise is</p>
<p><disp-formula id="eqn-1"><label>(1)</label>
<mml:math id="mml-eqn-1" display="block"><mml:mi>&#x03B3;</mml:mi><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mi>m</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mo>&#x00D7;</mml:mo><mml:mi>n</mml:mi></mml:mrow></mml:mfrac></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mn>100</mml:mn><mml:mi mathvariant="normal">&#x0025;</mml:mi><mml:mspace width="thickmathspace" /></mml:mstyle></mml:math>
</disp-formula></p>
<p>The adaptive MF has separated as to part a and part <inline-formula id="ieqn-4">
<mml:math id="mml-ieqn-4"><mml:mi>b</mml:mi></mml:math>
</inline-formula>:</p>
<p><inline-formula id="ieqn-5">
<mml:math id="mml-ieqn-5"><mml:mi>a</mml:mi></mml:math>
</inline-formula>. <italic>The extent affecting</italic>: <inline-formula id="ieqn-6">
<mml:math id="mml-ieqn-6"><mml:mi>&#x03B3;</mml:mi><mml:mo>&#x003C;</mml:mo><mml:mi>T</mml:mi></mml:math>
</inline-formula> <italic>then</italic> <inline-formula id="ieqn-7">
<mml:math id="mml-ieqn-7"><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mi>N</mml:mi></mml:msubsup><mml:mo>=</mml:mo><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mi>M</mml:mi></mml:msubsup><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mrow><mml:msubsup><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /></mml:msubsup><mml:mo>&#x2061;</mml:mo><mml:msubsup><mml:mrow><mml:mi mathvariant="normal">S</mml:mi></mml:mrow><mml:mrow><mml:mrow><mml:mi mathvariant="normal">i</mml:mi></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mi mathvariant="normal">j</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi mathvariant="normal">S</mml:mi></mml:mrow></mml:msubsup></mml:mrow><mml:mi>&#x03C4;</mml:mi></mml:mfrac></mml:mrow><mml:mo>,</mml:mo></mml:mstyle></mml:math>
</inline-formula> <inline-formula id="ieqn-8">
<mml:math id="mml-ieqn-8"><mml:mi>&#x03B3;</mml:mi><mml:mo>&#x2265;</mml:mo><mml:mi>T</mml:mi></mml:math>
</inline-formula> <italic>then jump to part</italic> <inline-formula id="ieqn-9">
<mml:math id="mml-ieqn-9"><mml:mi>b</mml:mi><mml:mo>.</mml:mo></mml:math>
</inline-formula></p>
<p><inline-formula id="ieqn-10">
<mml:math id="mml-ieqn-10"><mml:mi>b</mml:mi></mml:math>
</inline-formula>. <italic>Extend the filter window</italic>: <italic>an increase the filter window to</italic> <inline-formula id="ieqn-11">
<mml:math id="mml-ieqn-11"><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mover><mml:mi>n</mml:mi><mml:mo stretchy="false">&#x005E;</mml:mo></mml:mover></mml:mrow><mml:mo>+</mml:mo><mml:mn>2</mml:mn></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mover><mml:mi>n</mml:mi><mml:mo stretchy="false">&#x005E;</mml:mo></mml:mover></mml:mrow><mml:mo>+</mml:mo><mml:mn>2</mml:mn></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math>
</inline-formula>, <italic>and recomputed</italic> <inline-formula id="ieqn-12">
<mml:math id="mml-ieqn-12"><mml:mi>&#x03B3;</mml:mi></mml:math>
</inline-formula>, <italic>jump to part</italic> <inline-formula id="ieqn-13">
<mml:math id="mml-ieqn-13"><mml:mi>a</mml:mi><mml:mo>.</mml:mo></mml:math>
</inline-formula></p>
<p>Where, <inline-formula id="ieqn-14">
<mml:math id="mml-ieqn-14"><mml:mrow><mml:mover><mml:mi>n</mml:mi><mml:mo stretchy="false">&#x005E;</mml:mo></mml:mover></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mrow><mml:mover><mml:mi>n</mml:mi><mml:mo stretchy="false">&#x005E;</mml:mo></mml:mover></mml:mrow></mml:math>
</inline-formula> refers the dimensional of final filter window, <inline-formula id="ieqn-15">
<mml:math id="mml-ieqn-15"><mml:mi>T</mml:mi></mml:math>
</inline-formula> represents the amount of non-noise pixels from the filter window, <inline-formula id="ieqn-16">
<mml:math id="mml-ieqn-16"><mml:msubsup><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mi>M</mml:mi></mml:msubsup></mml:math>
</inline-formula> signifies the median of non-noise pixel from the filter window. <inline-formula id="ieqn-17">
<mml:math id="mml-ieqn-17"><mml:mi>T</mml:mi></mml:math>
</inline-formula> demonstrate the threshold of extent which is influenced by impulse noise. During the circumstances that the dimensional of MF window was set, if the count of noise obtains 3/10 of the count of filter window pixel, the filter outcome alters for unacceptable. Hence the threshold <inline-formula id="ieqn-18">
<mml:math id="mml-ieqn-18"><mml:mi>T</mml:mi></mml:math>
</inline-formula> is set 0.3 for getting an optimum filter outcome. The benefit of adaptive window filter technique:<list list-type="order"><list-item>
<p>As adaptive window filter functions for changing the dimensional of filter window based on the influence extents of impulse noise, not only this it can resolve the entire failure of MF technique, i.e., it is elected the adaptive filter window for getting optimum filter outcome.</p></list-item><list-item>
<p>An only noise signal is filtered, and maintain the helpful signals that aren&#x2019;t influenced as the impulse noise. During the course of filter, only non-noise pixels do the filter function, and the noise pixels have been foreclosed. Afterward, it is lesser the outcome of impulse noise for the filtering resultant.</p></list-item><list-item>
<p>Only impulse noise pixel filters, thus related to standard MF technique, the speed is significantly higher, and it can improve the feasibility of the techniques.</p></list-item></list></p>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>SSA with GoogleNet Based Feature Extraction</title>
<p>During feature extraction, the preprocessed images are fed into the GoogleNet model to derive feature vectors. The CNN can extract accurate feature vectors due to their strong feature-learning capability. But, it could not implement time-series network packet recognition. So, the CNN-long short term memory (LSTM) technique is particularly planned for effectual intrusion recognition, where the CNN has primarily utilized for obtaining the feature vector which is then utilized to time-series recognition by LSTM network. Convention CNN isn&#x2019;t efficient at feature extraction if utilized with industrial control information which is a huge amount of features and displays difficult variation. Therefore, in the presented manner, GoogLeNet has been utilized rather than convention CNN for obtaining further accurate feature vectors. The GoogLeNet has been kind of CNN with different frameworks named as inception modules [<xref ref-type="bibr" rid="ref-20">20</xref>]. It implements computations utilizing distinct kinds of kernels from single layer; however, convention CNN is only one variety of kernels.</p>
<p>The generally utilized kernel from GoogLeNet are 1 &#x00D7; 1, 3 &#x00D7; 3, 5 &#x00D7; 5, and 7 &#x00D7; 7, and the computation outcomes in this kernel are joined as to last output. The feature is an optimum signified to calculate the distinct scales. The several inception components are stacked for performing computations from various layers. The width as well as depth of network is superior to individuals of convention CNN. Thus the outcome, the GoogLeNet is to remove richer features in industrial control information. Also, the GoogLeNet is carry out functions for handling the huge amount of parameters rising in utilize of numerous kinds of kernels and various layers from the networks. The 1 &#x00D7; n convolutional kernel and n &#x00D7; 1 convolutional kernel are utilized for replacing n &#x00D7; n convolutional kernel to maximum dimension feature modeling. So the GoogLeNet has been appropriate to utilize as lightweight feature extraction networks from the presented technique.</p>
<p>To optimally modify the hyperparameters involved in the GoogleNet model, the SSA is utilized [<xref ref-type="bibr" rid="ref-21">21</xref>]. The SSA has a novel kind of swarm intelligence (SI) optimized technique simulated as detecting the foraging as well as anti-predation performance of sparrow. Its bionic rules are as follows:</p>
<p>Two behavior approaches from the foraging procedure of sparrows, one is the finder and another is joiner. The finder has been leader of populations, mostly finding the foraging region and particular way to the whole sparrow populations, and the joiner commonly follows the lead of the finder for obtaining food. Simultaneously, for increasing its rate of predation, any joiners are monitoring the finder for competing with them to food/forage in their surroundings. If the whole sparrow population is threatened as predator/recognizes the danger, it is conducting anti-predation performance.</p>
<p>In SSA, the finder gives priority to food and takes a higher foraging range than the joiner. The finder usually accounts for [10%&#x2013;20%] of the population, and the place upgrade equation is as follows at all iterations:</p>
<p><disp-formula id="eqn-2"><label>(2)</label>
<mml:math id="mml-eqn-2" display="block"><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup><mml:mo>&#x22C5;</mml:mo><mml:mi>exp</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>&#x03B1;</mml:mi><mml:mo>&#x22C5;</mml:mo><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow></mml:msub></mml:mrow></mml:mrow></mml:mfrac></mml:mrow></mml:mstyle></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thickmathspace" /><mml:mrow><mml:msub><mml:mi>R</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>&#x003C;</mml:mo><mml:mi>S</mml:mi><mml:mi>T</mml:mi></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup><mml:mo>+</mml:mo><mml:mi>Q</mml:mi><mml:mo>&#x22C5;</mml:mo><mml:mi>L</mml:mi></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thickmathspace" 
/><mml:mrow><mml:msub><mml:mi>R</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>&#x2265;</mml:mo><mml:mi>S</mml:mi><mml:mi>T</mml:mi></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo fence="true" stretchy="true" symmetric="true"></mml:mo></mml:mrow></mml:math>
</disp-formula></p>
<p>where: <inline-formula id="ieqn-19">
<mml:math id="mml-ieqn-19"><mml:mi>t</mml:mi></mml:math>
</inline-formula> implies the present amount of iterations; <inline-formula id="ieqn-20">
<mml:math id="mml-ieqn-20"><mml:mi>T</mml:mi></mml:math>
</inline-formula> signifies the maximal amount of iterations; <inline-formula id="ieqn-21">
<mml:math id="mml-ieqn-21"><mml:mrow><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
</inline-formula> implies the place data of <inline-formula id="ieqn-22">
<mml:math id="mml-ieqn-22"><mml:mi>i</mml:mi></mml:math>
</inline-formula>th sparrow from the jth dimensional. <inline-formula id="ieqn-23">
<mml:math id="mml-ieqn-23"><mml:mi>&#x03B1;</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math>
</inline-formula> refers the arbitrary number; <inline-formula id="ieqn-24">
<mml:math id="mml-ieqn-24"><mml:mi>Q</mml:mi></mml:math>
</inline-formula> stands for arbitrary number which follows a normal distribution; <inline-formula id="ieqn-25">
<mml:math id="mml-ieqn-25"><mml:mi>L</mml:mi></mml:math>
</inline-formula> represents the matrix with every 1 element, and the size is <inline-formula id="ieqn-26">
<mml:math id="mml-ieqn-26"><mml:mn>1</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mi>d</mml:mi></mml:math>
</inline-formula>; <inline-formula id="ieqn-27">
<mml:math id="mml-ieqn-27"><mml:mi>R</mml:mi><mml:mn>2</mml:mn><mml:mo>&#x2208;</mml:mo><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math>
</inline-formula> and <inline-formula id="ieqn-28">
<mml:math id="mml-ieqn-28"><mml:mi>S</mml:mi><mml:mi>T</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /><mml:mn>0.5</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math>
</inline-formula> correspondingly refer the initial warning value and safety value. If the <inline-formula id="ieqn-29">
<mml:math id="mml-ieqn-29"><mml:mrow><mml:msub><mml:mi>R</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>&#x003C;</mml:mo><mml:mi>S</mml:mi><mml:mi>T</mml:mi></mml:math>
</inline-formula>, it implies that no predator is present in the foraging region, and the finder conducts an extensive search; when <inline-formula id="ieqn-30">
<mml:math id="mml-ieqn-30"><mml:mrow><mml:msub><mml:mi>R</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo>&#x003E;</mml:mo><mml:mi>S</mml:mi><mml:mi>T</mml:mi></mml:math>
</inline-formula>, it represents that predator is established from the foraging region, and initial warning Sparrow sends warning signal to another sparrow, and every sparrow from the population can fly to safe region to the food.</p>
<p>But the finder, the residual sparrows have every joiner as well as upgrade its places based on the subsequent equation:</p>
<p><disp-formula id="eqn-3"><label>(3)</label>
<mml:math id="mml-eqn-3" display="block"><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>Q</mml:mi><mml:mo>&#x22C5;</mml:mo><mml:mi>exp</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>w</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup></mml:mrow><mml:mrow><mml:mrow><mml:msup><mml:mi>i</mml:mi><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:mrow></mml:mfrac></mml:mrow></mml:mstyle></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thickmathspace" 
/><mml:mi>i</mml:mi><mml:mo>&#x003E;</mml:mo><mml:mi>n</mml:mi><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mi>P</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:mrow><mml:mo>|</mml:mo><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:msubsup><mml:mi>X</mml:mi><mml:mi>P</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup></mml:mrow><mml:mo>|</mml:mo></mml:mrow><mml:mo>&#x22C5;</mml:mo><mml:mrow><mml:msup><mml:mi>A</mml:mi><mml:mo>+</mml:mo></mml:msup></mml:mrow><mml:mo>&#x22C5;</mml:mo><mml:mi>L</mml:mi></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mi>O</mml:mi><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>w</mml:mi><mml:mi>i</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo fence="true" stretchy="true" symmetric="true"></mml:mo></mml:mrow></mml:math>
</disp-formula></p>
<p>where <inline-formula id="ieqn-31">
<mml:math id="mml-ieqn-31"><mml:mrow><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>w</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
</inline-formula> signifies the present worse place from the world; <inline-formula id="ieqn-32">
<mml:math id="mml-ieqn-32"><mml:mrow><mml:msub><mml:mi>X</mml:mi><mml:mi>p</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> implies the optimum place of finder under the present state; A signifies a <inline-formula id="ieqn-33">
<mml:math id="mml-ieqn-33"><mml:mn>1</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mi>d</mml:mi></mml:math>
</inline-formula> matrix, where all elements are arbitrarily allocated a value of 1 or &#x2013;1, and <italic>A</italic><sup>+</sup> &#x003D; <italic>A</italic><sup>T</sup>(<italic>AA</italic><sup>T</sup>)<sup>&#x2212;1</sup>. If <inline-formula id="ieqn-34">
<mml:math id="mml-ieqn-34"><mml:mi>i</mml:mi><mml:mo>&#x003E;</mml:mo><mml:mi>n</mml:mi><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:math>
</inline-formula>, it signifies that because of minimum fitness, the <inline-formula id="ieqn-35">
<mml:math id="mml-ieqn-35"><mml:mi>i</mml:mi></mml:math>
</inline-formula>th joiner has no food and is extremely hungry; therefore, it needs to fly to another place to find food and obtain more energy. <xref ref-type="fig" rid="fig-1">Fig. 1</xref> demonstrates the flowchart of SSA.</p>
<fig id="fig-1">
<label>Figure 1</label>
<caption>
<title>Flowchart of SSA</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="CSSE_24674-fig-1.png"/>
</fig>
<p>If danger is detected, the sparrow set conducts anti-predation behavior and their places are updated as:</p>
<p><disp-formula id="eqn-4"><label>(4)</label>
<mml:math id="mml-eqn-4" display="block"><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup><mml:mo>+</mml:mo><mml:mi>&#x03B2;</mml:mi><mml:mo>&#x22C5;</mml:mo><mml:mrow><mml:mo>|</mml:mo><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup></mml:mrow><mml:mo>|</mml:mo></mml:mrow></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thickmathspace" /><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>&#x003E;</mml:mo><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup><mml:mo>+</mml:mo><mml:mi>K</mml:mi><mml:mo>&#x22C5;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mstyle displaystyle="true" 
scriptlevel="0"><mml:mrow><mml:mfrac><mml:mrow><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:msubsup><mml:mi>X</mml:mi><mml:mrow><mml:mi>w</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:msubsup></mml:mrow><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>w</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>+</mml:mo><mml:mi>e</mml:mi></mml:mrow></mml:mfrac></mml:mrow></mml:mstyle></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mtd><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thickmathspace" /><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow><mml:mo fence="true" stretchy="true" symmetric="true"></mml:mo></mml:mrow><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /></mml:math>
</disp-formula></p>
<p>During the equation: <inline-formula id="ieqn-36">
<mml:math id="mml-ieqn-36"><mml:mi>&#x03B2;</mml:mi></mml:math>
</inline-formula> indicates the step length control parameters that are normal distribution arbitrary number with mean value of zero and variance of one; <inline-formula id="ieqn-37">
<mml:math id="mml-ieqn-37"><mml:mi>K</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math>
</inline-formula> refers to an arbitrary number which signifies the direction in which the sparrow moves, combined with a step length control parameter; <inline-formula id="ieqn-38">
<mml:math id="mml-ieqn-38"><mml:mi>e</mml:mi></mml:math>
</inline-formula> denotes a small constant fixed to prevent the denominator from becoming zero; <inline-formula id="ieqn-39">
<mml:math id="mml-ieqn-39"><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> stands for the fitness value of <inline-formula id="ieqn-40">
<mml:math id="mml-ieqn-40"><mml:mi>i</mml:mi></mml:math>
</inline-formula>th sparrow, <inline-formula id="ieqn-41">
<mml:math id="mml-ieqn-41"><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> and <inline-formula id="ieqn-42">
<mml:math id="mml-ieqn-42"><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>w</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> defines the optimum and least fitness values of present population correspondingly. If <inline-formula id="ieqn-43">
<mml:math id="mml-ieqn-43"><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>&#x003E;</mml:mo><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, it represents that the sparrow is at the edge of the population and is easily attacked by a predator; if <inline-formula id="ieqn-44">
<mml:math id="mml-ieqn-44"><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:msub><mml:mi>f</mml:mi><mml:mi>g</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, it denotes that a sparrow in the middle of the population is aware of the risk of predators and needs to cluster together with other sparrows to reduce the predation threat.</p>
</sec>
<sec id="s3_4">
<label>3.4</label>
<title>Image Classification</title>
<p>At the final stage, the FLNN model receives the feature vectors and assigns class labels to the input test images. Usually, functional link-based NN techniques are single-layer artificial neural network (ANN) frameworks that model a higher rate of convergence and a lower computational load than multilayer perceptron (MLP) frameworks. The performance and mapping capability of ANNs and their application to channel equalization have been described. The mathematical processing and computational cost are estimated as for the MLP. Patra initially presented the functional link ANN (FLNN), a new single-layer ANN framework capable of creating arbitrarily complex decision regions by forming non-linear decision boundaries [<xref ref-type="bibr" rid="ref-22">22</xref>]. <xref ref-type="fig" rid="fig-2">Fig. 2</xref> illustrates the framework of the FLNN model. In FLNN, the hidden layer is eliminated. In addition, the FLNN design offers lower computational complexity and superior convergence speed compared to the MLP due to its single-layer framework. At this point, the functional expansion block makes use of a functional method containing a subset of orthogonal sine and cosine basis functions and the original pattern together with their outer products. The BP technique that is utilized for training the network becomes very simple due to the absence of a hidden layer.</p>
<fig id="fig-2">
<label>Figure 2</label>
<caption>
<title>Structure of FLNN</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="CSSE_24674-fig-2.png"/>
</fig>
</sec>
</sec>
<sec id="s4">
<label>4</label>
<title>Performance Validation</title>
<p>This section investigates the BT classification performance of the IoMTC-HDBT technique against benchmark BT dataset from Kaggle repository [<xref ref-type="bibr" rid="ref-23">23</xref>]. The dataset comprises 98 images under Normal class and 155 images under Abnormal class. The dataset includes images with the size of 192 &#x00D7; 192 to 630 &#x00D7; 630. <xref ref-type="fig" rid="fig-3">Fig. 3</xref> illustrates a few sample images.</p>
<fig id="fig-3">
<label>Figure 3</label>
<caption>
<title>Sample images</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="CSSE_24674-fig-3.png"/>
</fig>
<p>The set of confusion matrices generated by the IoMTC-HDBT technique on the test dataset is given in <xref ref-type="fig" rid="fig-4">Fig. 4</xref>. The figures show that the IoMTC-HDBT technique has effectually identified the images into Normal and Abnormal classes. For instance, with run-1, the IoMTC-HDBT technique has identified 96 images into Normal and 152 images into Abnormal class. Likewise, with run-4, the IoMTC-HDBT approach has identified 85 images into Normal and 153 images into Abnormal class. Also, with run-6, the IoMTC-HDBT manner has identified 98 images into Normal and 154 images into Abnormal class. In addition, with run-8, the IoMTC-HDBT method has identified 95 images into Normal and 153 images into Abnormal class. Lastly, with run-10, the IoMTC-HDBT algorithm has identified 98 images into Normal and 152 images into Abnormal class.</p>
<fig id="fig-4">
<label>Figure 4</label>
<caption>
<title>Confusion matrix of IoMTC-HDBT model</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="CSSE_24674-fig-4.png"/>
</fig>
<p><xref ref-type="table" rid="table-1">Tab. 1</xref> and <xref ref-type="fig" rid="fig-5">Fig. 5</xref> offer a brief classification results analysis of the IoMTC-HDBT technique under ten distinct runs. The experimental results highlighted that the IoMTC-HDBT technique has proficiently classified the images under various runs. For instance, with run-1, the IoMTC-HDBT technique has attained <inline-formula id="ieqn-45">
<mml:math id="mml-ieqn-45"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-46">
<mml:math id="mml-ieqn-46"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mrow><mml:msub><mml:mi>e</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-47">
<mml:math id="mml-ieqn-47"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, and <inline-formula id="ieqn-48">
<mml:math id="mml-ieqn-48"><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.9796, 0.9806, 0.9802, and 0.9746 respectively. Eventually, with run-4, the IoMTC-HDBT system has gained <inline-formula id="ieqn-49">
<mml:math id="mml-ieqn-49"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-50">
<mml:math id="mml-ieqn-50"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mrow><mml:msub><mml:mi>e</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-51">
<mml:math id="mml-ieqn-51"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, and <inline-formula id="ieqn-52">
<mml:math id="mml-ieqn-52"><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.9694, 0.9871, 0.9802, and 0.9744 correspondingly. Meanwhile, with run-6, the IoMTC-HDBT approach has reached <inline-formula id="ieqn-53">
<mml:math id="mml-ieqn-53"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-54">
<mml:math id="mml-ieqn-54"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mrow><mml:msub><mml:mi>e</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-55">
<mml:math id="mml-ieqn-55"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, and <inline-formula id="ieqn-56">
<mml:math id="mml-ieqn-56"><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
</inline-formula> of 1.0000, 0.9935, 0.9960, and 0.9949 correspondingly. Concurrently, with run-8, the IoMTC-HDBT technique has attained <inline-formula id="ieqn-57">
<mml:math id="mml-ieqn-57"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-58">
<mml:math id="mml-ieqn-58"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mrow><mml:msub><mml:mi>e</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-59">
<mml:math id="mml-ieqn-59"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, and <inline-formula id="ieqn-60">
<mml:math id="mml-ieqn-60"><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.9694, 0.9871, 0.9802, and 0.9744 respectively. Likewise, with run-10, the IoMTC-HDBT methodology has obtained <inline-formula id="ieqn-61">
<mml:math id="mml-ieqn-61"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-62">
<mml:math id="mml-ieqn-62"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mrow><mml:msub><mml:mi>e</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-63">
<mml:math id="mml-ieqn-63"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, and <inline-formula id="ieqn-64">
<mml:math id="mml-ieqn-64"><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
</inline-formula> of 1.0000, 0.9806, 0.9881, and 0.9849 correspondingly.</p>
<table-wrap id="table-1"><label>Table 1</label>
<caption>
<title>Result analysis of IoMTC-HDBT model with different measures</title></caption>
<table><colgroup>
<col/>
<col/>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr>
<th>No. of runs</th>
<th>Sensitivity</th>
<th>Specificity</th>
<th>Accuracy</th>
<th>F-Score</th>
</tr>
</thead>
<tbody>
<tr>
<td>Run-1</td>
<td>0.9796</td>
<td>0.9806</td>
<td>0.9802</td>
<td>0.9746</td>
</tr>
<tr>
<td>Run-2</td>
<td>0.9694</td>
<td>0.9871</td>
<td>0.9802</td>
<td>0.9744</td>
</tr>
<tr>
<td>Run-3</td>
<td>0.9796</td>
<td>0.9871</td>
<td>0.9842</td>
<td>0.9796</td>
</tr>
<tr>
<td>Run-4</td>
<td>0.9694</td>
<td>0.9871</td>
<td>0.9802</td>
<td>0.9744</td>
</tr>
<tr>
<td>Run-5</td>
<td>0.9898</td>
<td>0.9806</td>
<td>0.9842</td>
<td>0.9798</td>
</tr>
<tr>
<td>Run-6</td>
<td>1.0000</td>
<td>0.9935</td>
<td>0.9960</td>
<td>0.9949</td>
</tr>
<tr>
<td>Run-7</td>
<td>0.9796</td>
<td>0.9871</td>
<td>0.9842</td>
<td>0.9796</td>
</tr>
<tr>
<td>Run-8</td>
<td>0.9694</td>
<td>0.9871</td>
<td>0.9802</td>
<td>0.9744</td>
</tr>
<tr>
<td>Run-9</td>
<td>0.9694</td>
<td>0.9935</td>
<td>0.9842</td>
<td>0.9794</td>
</tr>
<tr>
<td>Run-10</td>
<td>1.0000</td>
<td>0.9806</td>
<td>0.9881</td>
<td>0.9849</td>
</tr>
<tr>
<td>Average</td>
<td>0.9806</td>
<td>0.9864</td>
<td>0.9842</td>
<td>0.9796</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="fig-5">
<label>Figure 5</label>
<caption>
<title>Result analysis of IoMTC-HDBT model with varying measures</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="CSSE_24674-fig-5.png"/>
</fig>
<p><xref ref-type="fig" rid="fig-6">Fig. 6</xref> illustrates the set of receiver operating characteristic (ROC) analyses of the IoMTC-HDBT technique under ten distinct runs. These figures demonstrated that the IoMTC-HDBT technique has resulted in maximum ROC values under every test run.</p>
<fig id="fig-6">
<label>Figure 6</label>
<caption>
<title>ROC analysis of IoMTC-HDBT model under 10 runs</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="CSSE_24674-fig-6.png"/>
</fig>
<p><xref ref-type="fig" rid="fig-7">Fig. 7</xref> demonstrates the overall average classification results analysis of the IoMTC-HDBT technique on the test images applied. The figure shows that the IoMTC-HDBT technique has reached to maximum average <inline-formula id="ieqn-65">
<mml:math id="mml-ieqn-65"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-66">
<mml:math id="mml-ieqn-66"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mrow><mml:msub><mml:mi>e</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, <inline-formula id="ieqn-67">
<mml:math id="mml-ieqn-67"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula>, and <inline-formula id="ieqn-68">
<mml:math id="mml-ieqn-68"><mml:mrow><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.9806, 0.9864, 0.9842, and 0.9796 respectively.</p>
<fig id="fig-7">
<label>Figure 7</label>
<caption>
<title>Average analysis of IoMTC-HDBT model</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="CSSE_24674-fig-7.png"/>
</fig>
<p>In order to showcase the outstanding performance of the IoMTC-HDBT technique, a comparative results analysis is made in <xref ref-type="table" rid="table-2">Tab. 2</xref>. <xref ref-type="fig" rid="fig-8">Fig. 8</xref> showcases the <inline-formula id="ieqn-69">
<mml:math id="mml-ieqn-69"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> analysis of the IoMTC-HDBT technique with recent approaches. The figure shows that the methods devised by Anitha et al. and Urban et al. have attained minimal <inline-formula id="ieqn-70">
<mml:math id="mml-ieqn-70"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.912 and 0.926 respectively. In addition, the methods developed by Pereira et al. and Islam et al. have attained moderate <inline-formula id="ieqn-71">
<mml:math id="mml-ieqn-71"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.942 and 0.943 respectively. On continuing with, the techniques presented by Selvapandian et al. and Ganesan et al. have resulted in near optimal <inline-formula id="ieqn-72">
<mml:math id="mml-ieqn-72"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.962 and 0.972. However, the IoMTC-HDBT technique has outperformed the other techniques with the maximum <inline-formula id="ieqn-73">
<mml:math id="mml-ieqn-73"><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.981.</p>
<table-wrap id="table-2"><label>Table 2</label>
<caption>
<title>Comparative analysis of IoMTC-HDBT model with existing approaches</title></caption>
<table><colgroup>
<col/>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr>
<th>Methods</th>
<th>Sensitivity</th>
<th>Specificity</th>
<th>Accuracy</th>
</tr>
</thead>
<tbody>
<tr>
<td>IoMTC-HDBT</td>
<td>0.981</td>
<td>0.986</td>
<td>0.984</td>
</tr>
<tr>
<td>Ganesan et al. (2020)</td>
<td>0.972</td>
<td>0.988</td>
<td>0.981</td>
</tr>
<tr>
<td>Selvapandian et al. (2018)</td>
<td>0.962</td>
<td>0.951</td>
<td>0.964</td>
</tr>
<tr>
<td>Anitha et al. (2017)</td>
<td>0.912</td>
<td>0.934</td>
<td>0.933</td>
</tr>
<tr>
<td>Pereira et al. (2016)</td>
<td>0.942</td>
<td>0.944</td>
<td>0.946</td>
</tr>
<tr>
<td>Urban et al. (2014)</td>
<td>0.926</td>
<td>0.930</td>
<td>0.933</td>
</tr>
<tr>
<td>Islam et al. (2013)</td>
<td>0.943</td>
<td>0.951</td>
<td>0.959</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="fig-8">
<label>Figure 8</label>
<caption>
<title>Sensitivity analysis of IoMTC-HDBT technique</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="CSSE_24674-fig-8.png"/>
</fig>
<p><xref ref-type="fig" rid="fig-9">Fig. 9</xref> illustrates the <inline-formula id="ieqn-74">
<mml:math id="mml-ieqn-74"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> analysis of the IoMTC-HDBT approach with recent algorithms. The figure shows that the methods devised by Anitha et al. and Urban et al. have obtained minimal <inline-formula id="ieqn-75">
<mml:math id="mml-ieqn-75"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.934 and 0.930 correspondingly. Besides, the methods established by Pereira et al. and Islam et al. have attained moderate <inline-formula id="ieqn-76">
<mml:math id="mml-ieqn-76"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.944 and 0.951 correspondingly. In line with this, the techniques projected by Selvapandian et al. and Ganesan et al. have resulted in near optimal <inline-formula id="ieqn-77">
<mml:math id="mml-ieqn-77"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.951 and 0.988. However, the IoMTC-HDBT method has outperformed the other methods with the maximal <inline-formula id="ieqn-78">
<mml:math id="mml-ieqn-78"><mml:mi>s</mml:mi><mml:mi>p</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.986.</p>
<fig id="fig-9">
<label>Figure 9</label>
<caption>
<title>Specificity analysis of IoMTC-HDBT technique</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="CSSE_24674-fig-9.png"/>
</fig>
<p><xref ref-type="fig" rid="fig-10">Fig. 10</xref> depicts the <inline-formula id="ieqn-79">
<mml:math id="mml-ieqn-79"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> analysis of the IoMTC-HDBT manner with recent systems. The figure demonstrates that the methods devised by Anitha et al. and Urban et al. have attained minimal <inline-formula id="ieqn-80">
<mml:math id="mml-ieqn-80"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow><mml:mspace width="thickmathspace" /></mml:math>
</inline-formula> of 0.933 and 0.933 respectively. Also, the methods developed by Pereira et al. and Islam et al. have attained moderate <inline-formula id="ieqn-81">
<mml:math id="mml-ieqn-81"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.946 and 0.959 respectively. Followed by, the techniques presented by Selvapandian et al. and Ganesan et al. have resulted in near optimal <inline-formula id="ieqn-82">
<mml:math id="mml-ieqn-82"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.964 and 0.981. Finally, the IoMTC-HDBT methodology has outperformed the other techniques with the increased <inline-formula id="ieqn-83">
<mml:math id="mml-ieqn-83"><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mrow><mml:msub><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:msub></mml:mrow></mml:math>
</inline-formula> of 0.984.</p>
<fig id="fig-10">
<label>Figure 10</label>
<caption>
<title>Accuracy analysis of IoMTC-HDBT technique</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="CSSE_24674-fig-10.png"/>
</fig>
<p>By looking into these results analysis, it is apparent that the IoMTC-HDBT technique can be used as an effectual tool for BT diagnosis in the IoMT environment.</p>
</sec>
<sec id="s5">
<label>5</label>
<title>Conclusion</title>
<p>In this study, a novel IoMTC-HDBT technique has been presented to detect and classify the BT using medical images. The proposed IoMTC-HDBT technique encompasses AWF based image filtering, GoogleNet based feature extraction, SSA based hyperparameter tuning, and FLNN based classification. The IoMTC-HDBT technique has the ability to detect and classify the MRI brain images as normal or abnormal. It is also found useful to generate the reports instantly for patients located in remote areas. The validation of the IoMTC-HDBT model takes place against BRATS2015 Challenge dataset and the experimental analysis is carried out in terms of sensitivity, accuracy, and specificity. The experimental outcomes pointed out the superiority of the proposed model on all the applied test images. In future, the IoMTC-HDBT technique can be extended to diagnose lung cancer, heart disease, etc.</p>
</sec>
</body>
<back><fn-group>
<fn fn-type="other">
<p><bold>Funding Statement:</bold> This study was supported by the grants of the Korea Health Technology R&#x0026;D Project through the Korea Health Industry Development Institute (KHIDI), funded by the Ministry of Health &#x0026; Welfare (HI18C1216), the grant of the National Research Foundation of Korea (NRF-2020R1I1A1A01074256) and the Soonchunhyang University Research Fund.</p>
</fn>
<fn fn-type="conflict">
<p><bold>Conflicts of Interest:</bold> The authors declare that they have no conflicts of interest to report regarding the present study.</p>
</fn>
</fn-group>
<ref-list content-type="authoryear">
<title>References</title>
<ref id="ref-1"><label>[1]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Hashem</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Vellappally</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Fouad</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Luqman</surname></string-name> and <string-name><given-names>A. E.</given-names> <surname>Youssef</surname></string-name></person-group>, &#x201C;<article-title>Predicting neurological disorders linked to oral cavity manifestations using an iomt-based optimized neural networks</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>8</volume>, pp. <fpage>190722</fpage>&#x2013;<lpage>190733</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-2"><label>[2]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Ghubaish</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Salman</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Zolanvari</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Unal</surname></string-name>, <string-name><given-names>A. A.</given-names> <surname>Ali</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Recent advances in the internet-of-medical-things (iomt) systems security</article-title>,&#x201D; <source>IEEE Internet of Things Journal</source>, vol. <volume>8</volume>, no. <issue>11</issue>, pp. <fpage>8707</fpage>&#x2013;<lpage>8718</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-3"><label>[3]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Almogren</surname></string-name>, <string-name><given-names>I.</given-names> <surname>Mohiuddin</surname></string-name>, <string-name><given-names>I. U.</given-names> <surname>Din</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Almajed</surname></string-name> and <string-name><given-names>N.</given-names> <surname>Guizani</surname></string-name></person-group>, &#x201C;<article-title>FTM-IoMT: Fuzzy-based trust management for preventing sybil attacks in internet of medical things</article-title>,&#x201D; <source>IEEE Internet of Things Journal</source>, vol. <volume>8</volume>, no. <issue>6</issue>, pp. <fpage>4485</fpage>&#x2013;<lpage>4497</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-4"><label>[4]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>Z.</given-names> <surname>Yuldashev</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Sergeev</surname></string-name> and <string-name><given-names>N.</given-names> <surname>Nastueva</surname></string-name></person-group>, &#x201C;<article-title>IoMT technology as the basis of wearable online monitors for space distributed monitoring systems for pregnant women</article-title>,&#x201D; in <conf-name>2021 Wave Electronics and its Application in Information and Telecommunication Systems (WECONF)</conf-name>, <conf-loc>St. Petersburg, Russia</conf-loc>, pp. <fpage>1</fpage>&#x2013;<lpage>4</lpage>, <year>2021</year>. </mixed-citation></ref>
<ref id="ref-5"><label>[5]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Vishnu</surname></string-name>, <string-name><given-names>S. R. J.</given-names> <surname>Ramson</surname></string-name> and <string-name><given-names>R.</given-names> <surname>Jegan</surname></string-name></person-group>, &#x201C;<article-title>Internet of medical things (IoMT)-An overview</article-title>,&#x201D; in <conf-name>2020 5th Int. Conf. on Devices, Circuits and Systems (ICDCS)</conf-name>, <conf-loc>Coimbatore, India</conf-loc>, pp. <fpage>101</fpage>&#x2013;<lpage>104</lpage>, <year>2020</year>. </mixed-citation></ref>
<ref id="ref-6"><label>[6]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Bauer</surname></string-name>, <string-name><given-names>C.</given-names> <surname>May</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Dionysiou</surname></string-name>, <string-name><given-names>G.</given-names> <surname>Stamatakos</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Buchler</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Multiscale modeling for image analysis of brain tumor studies</article-title>,&#x201D; <source>IEEE Transactions on Biomedical Engineering</source>, vol. <volume>59</volume>, no. <issue>1</issue>, pp. <fpage>25</fpage>&#x2013;<lpage>29</lpage>, <year>2012</year>.</mixed-citation></ref>
<ref id="ref-7"><label>[7]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>G.</given-names> <surname>Mohan</surname></string-name> and <string-name><given-names>M. M.</given-names> <surname>Subashini</surname></string-name></person-group>, &#x201C;<article-title>MRI based medical image analysis: Survey on brain tumor grade classification</article-title>,&#x201D; <source>Biomedical Signal Processing and Control</source>, vol. <volume>39</volume>, pp. <fpage>139</fpage>&#x2013;<lpage>161</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-8"><label>[8]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Ker</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Bai</surname></string-name>, <string-name><given-names>H. Y.</given-names> <surname>Lee</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Rao</surname></string-name> and <string-name><given-names>L.</given-names> <surname>Wang</surname></string-name></person-group>, &#x201C;<article-title>Automated brain histology classification using machine learning</article-title>,&#x201D; <source>Journal of Clinical Neuroscience</source>, vol. <volume>66</volume>, no. <issue>1</issue>, pp. <fpage>239</fpage>&#x2013;<lpage>245</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-9"><label>[9]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Ker</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Rao</surname></string-name> and <string-name><given-names>T.</given-names> <surname>Lim</surname></string-name></person-group>, &#x201C;<article-title>Deep learning applications in medical image analysis</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>6</volume>, pp. <fpage>9375</fpage>&#x2013;<lpage>9389</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-10"><label>[10]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Pal</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Dong</surname></string-name>, <string-name><given-names>B.</given-names> <surname>Thapa</surname></string-name>, <string-name><given-names>N. V.</given-names> <surname>Chawla</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Swami</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Deep learning for network analysis: Problems, approaches and challenges</article-title>,&#x201D; in <conf-name>MILCOM, 2016-2016 IEEE Military Communications Conf.</conf-name>, <conf-loc>Baltimore, MD, USA</conf-loc>, pp. <fpage>588</fpage>&#x2013;<lpage>593</lpage>, <year>2016</year>. </mixed-citation></ref>
<ref id="ref-11"><label>[11]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>M. A. B.</given-names> <surname>Siddique</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Sakib</surname></string-name>, <string-name><given-names>M. M. R.</given-names> <surname>Khan</surname></string-name>, <string-name><given-names>A. K.</given-names> <surname>Tanzeem</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Chowdhury</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Deep convolutional neural networks model-based brain tumor detection in brain MRI images</article-title>,&#x201D; in <conf-name>2020 Fourth Int. Conf. on I-SMAC (IoT in Social, Mobile, Analytics and Cloud) (I-SMAC)</conf-name>, <conf-loc>Palladam, India</conf-loc>, pp. <fpage>909</fpage>&#x2013;<lpage>914</lpage>, <year>2020</year>. </mixed-citation></ref>
<ref id="ref-12"><label>[12]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Z.</given-names> <surname>Huang</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Du</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Chen</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Li</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Liu</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Convolutional neural network based on complex networks for brain tumor image classification with a modified activation function</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>8</volume>, pp. <fpage>89281</fpage>&#x2013;<lpage>89290</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-13"><label>[13]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>M. L.</given-names> <surname>Rahman</surname></string-name>, <string-name><given-names>S. H.</given-names> <surname>Shehab</surname></string-name>, <string-name><given-names>Z. H.</given-names> <surname>Chowdhury</surname></string-name> and <string-name><given-names>A. K.</given-names> <surname>Datta</surname></string-name></person-group>, &#x201C;<article-title>Predicting the possibility of being malignant tumor based on physical symptoms using IoT</article-title>,&#x201D; in <conf-name>2020 IEEE Region 10 Symp. (TENSYMP)</conf-name>, <conf-loc>Dhaka, Bangladesh</conf-loc>, pp. <fpage>26</fpage>&#x2013;<lpage>30</lpage>, <year>2020</year>. </mixed-citation></ref>
<ref id="ref-14"><label>[14]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>S. A. K. A.</given-names> <surname>Nahiun</surname></string-name>, <string-name><given-names>F.</given-names> <surname>Mahbub</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Islam</surname></string-name>, <string-name><given-names>S. B.</given-names> <surname>Akash</surname></string-name>, <string-name><given-names>R. R.</given-names> <surname>Hasan</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>Performance analysis of microstrip patch antenna for the diagnosis of brain cancer &#x0026; tumor using the fifth-generation frequency band</article-title>,&#x201D; in <conf-name>2021 IEEE Int. IOT, Electronics and Mechatronics Conf. (IEMTRONICS)</conf-name>, <conf-loc>Toronto, ON, Canada</conf-loc>, pp. <fpage>1</fpage>&#x2013;<lpage>6</lpage>, <year>2021</year>. </mixed-citation></ref>
<ref id="ref-15"><label>[15]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N.</given-names> <surname>Noreen</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Palaniappan</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Qayyum</surname></string-name>, <string-name><given-names>I.</given-names> <surname>Ahmad</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Imran</surname></string-name> <etal>et al.</etal></person-group><italic>,</italic> &#x201C;<article-title>A deep learning model based on concatenation approach for the diagnosis of brain tumor</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>8</volume>, pp. <fpage>55135</fpage>&#x2013;<lpage>55144</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-16"><label>[16]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N. M.</given-names> <surname>Aboelenein</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Songhao</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Koubaa</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Noor</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Afifi</surname></string-name></person-group>, &#x201C;<article-title>HTTU-Net: Hybrid two track u-net for automatic brain tumor segmentation</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>8</volume>, pp. <fpage>101406</fpage>&#x2013;<lpage>101415</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-17"><label>[17]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>H. H.</given-names> <surname>Sultan</surname></string-name>, <string-name><given-names>N. M.</given-names> <surname>Salem</surname></string-name> and <string-name><given-names>W.</given-names> <surname>Al-Atabany</surname></string-name></person-group>, &#x201C;<article-title>Multi-classification of brain tumor images using deep neural network</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>7</volume>, pp. <fpage>69215</fpage>&#x2013;<lpage>69225</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-18"><label>[18]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>L.</given-names> <surname>Tan</surname></string-name>, <string-name><given-names>W.</given-names> <surname>Ma</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Xia</surname></string-name> and <string-name><given-names>S.</given-names> <surname>Sarker</surname></string-name></person-group>, &#x201C;<article-title>Multimodal magnetic resonance image brain tumor segmentation based on ACU-Net network</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>9</volume>, pp. <fpage>14608</fpage>&#x2013;<lpage>14618</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-19"><label>[19]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Ding</surname></string-name>, <string-name><given-names>Q.</given-names> <surname>Zhu</surname></string-name>, <string-name><given-names>Z.</given-names> <surname>Xing</surname></string-name> and <string-name><given-names>L.</given-names> <surname>Li</surname></string-name></person-group>, &#x201C;<article-title>An adaptive-fuzzy filter algorithm for vision preprocessing</article-title>,&#x201D; in <conf-name>2006 IEEE Int. Conf. on Robotics and Biomimetics</conf-name>, <conf-loc>Kunming, China</conf-loc>, pp. <fpage>578</fpage>&#x2013;<lpage>582</lpage>, <year>2006</year>. </mixed-citation></ref>
<ref id="ref-20"><label>[20]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Chu</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Lai</surname></string-name> and <string-name><given-names>J.</given-names> <surname>Liu</surname></string-name></person-group>, &#x201C;<article-title>Industrial control intrusion detection approach based on multiclassification GoogLeNet-LSTM model</article-title>,&#x201D; <source>Security and Communication Networks</source>, vol. <volume>2019</volume>, no. <issue>2</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>11</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-21"><label>[21]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Xue</surname></string-name> and <string-name><given-names>B.</given-names> <surname>Shen</surname></string-name></person-group>, &#x201C;<article-title>A novel swarm intelligence optimization approach: Sparrow search algorithm</article-title>,&#x201D; <source>Systems Science &#x0026; Control Engineering</source>, vol. <volume>8</volume>, no. <issue>1</issue>, pp. <fpage>22</fpage>&#x2013;<lpage>34</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-22"><label>[22]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S. K.</given-names> <surname>Nanda</surname></string-name> and <string-name><given-names>D. P.</given-names> <surname>Tripathy</surname></string-name></person-group>, &#x201C;<article-title>Application of functional link artificial neural network for prediction of machinery noise in opencast mines</article-title>,&#x201D; <source>Advances in Fuzzy Systems</source>, vol. <volume>2011</volume>, no. <issue>6</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>11</lpage>, <year>2011</year>.</mixed-citation></ref>
<ref id="ref-23"><label>[23]</label><mixed-citation publication-type="other"><uri>https://www.kaggle.com/navoneel/brain-mri-images-for-brain-tumor-detection</uri>.</mixed-citation></ref>
</ref-list>
</back>
</article>