<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "http://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xml:lang="en" article-type="research-article" dtd-version="1.1">
<front>
<journal-meta>
<journal-id journal-id-type="pmc">CMC</journal-id>
<journal-id journal-id-type="nlm-ta">CMC</journal-id>
<journal-id journal-id-type="publisher-id">CMC</journal-id>
<journal-title-group>
<journal-title>Computers, Materials &#x0026; Continua</journal-title>
</journal-title-group>
<issn pub-type="epub">1546-2226</issn>
<issn pub-type="ppub">1546-2218</issn>
<publisher>
<publisher-name>Tech Science Press</publisher-name>
<publisher-loc>USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">47961</article-id>
<article-id pub-id-type="doi">10.32604/cmc.2024.047961</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Article</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>A Novel Approach to Breast Tumor Detection: Enhanced Speckle Reduction and Hybrid Classification in Ultrasound Imaging</article-title>
<alt-title alt-title-type="left-running-head">A Novel Approach to Breast Tumor Detection: Enhanced Speckle Reduction and Hybrid Classification in Ultrasound Imaging</alt-title>
<alt-title alt-title-type="right-running-head">A Novel Approach to Breast Tumor Detection: Enhanced Speckle Reduction and Hybrid Classification in Ultrasound Imaging</alt-title>
</title-group>
<contrib-group>
<contrib id="author-1" contrib-type="author" corresp="yes">
<name name-style="western"><surname>Umapathi</surname><given-names>K.</given-names></name><xref ref-type="aff" rid="aff-1">1</xref><email>umapathi.uit@gmail.com</email></contrib>
<contrib id="author-2" contrib-type="author">
<name name-style="western"><surname>Shobana</surname><given-names>S.</given-names></name><xref ref-type="aff" rid="aff-1">1</xref></contrib>
<contrib id="author-3" contrib-type="author">
<name name-style="western"><surname>Nayyar</surname><given-names>Anand</given-names></name><xref ref-type="aff" rid="aff-2">2</xref></contrib>
<contrib id="author-4" contrib-type="author">
<name name-style="western"><surname>Justin</surname><given-names>Judith</given-names></name><xref ref-type="aff" rid="aff-3">3</xref></contrib>
<contrib id="author-5" contrib-type="author">
<name name-style="western"><surname>Vanithamani</surname><given-names>R.</given-names></name><xref ref-type="aff" rid="aff-3">3</xref></contrib>
<contrib id="author-6" contrib-type="author">
<name name-style="western"><surname>Galindo</surname><given-names>Miguel Villag&#x00F3;mez</given-names></name><xref ref-type="aff" rid="aff-4">4</xref></contrib>
<contrib id="author-7" contrib-type="author">
<name name-style="western"><surname>Ansari</surname><given-names>Mushtaq Ahmad</given-names></name><xref ref-type="aff" rid="aff-5">5</xref></contrib>
<contrib id="author-8" contrib-type="author" corresp="yes">
<name name-style="western"><surname>Panchal</surname><given-names>Hitesh</given-names></name><xref ref-type="aff" rid="aff-6">6</xref><email>engineerhitesh2000@gmail.com</email></contrib>
<aff id="aff-1"><label>1</label><institution>Department of Biomedical Engineering, KIT-Kalaignarkarunanidhi Institute of Technology</institution>, <addr-line>Coimbatore, 641402</addr-line>, <country>India</country></aff>
<aff id="aff-2"><label>2</label><institution>Graduate School, Faculty of Information Technology, Duy Tan University</institution>, <addr-line>Da Nang, 550000</addr-line>, <country>Viet Nam</country></aff>
<aff id="aff-3"><label>3</label><institution>Department of Biomedical and Instrumentation Engineering, Avinashilingam Institute for Home Science and Higher Education for Women</institution>, <addr-line>Coimbatore, 641043</addr-line>, <country>India</country></aff>
<aff id="aff-4"><label>4</label><institution>Universidad Michoacana de San Nicol&#x00E1;s de Hidalgo Avenida Francisco J. M&#x00FA;gica S/N Ciudad Universitaria</institution>, <addr-line>Morelia, CP, 58030</addr-line>, <country>Mexico</country></aff>
<aff id="aff-5"><label>5</label><institution>Department of Pharmacology and Toxicology, College of Pharmacy, King Saud University</institution>, <addr-line>Riyadh, 11451</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-6"><label>6</label><institution>Department of Mechanical Engineering, Gujarat Technological University</institution>, <addr-line>Nigam Nagar, Chandkheda, Ahmedabad, Gujarat, 382424</addr-line>, <country>India</country></aff>
</contrib-group>
<author-notes>
<corresp id="cor1"><label>&#x002A;</label>Corresponding Authors: K. Umapathi. Email: <email>umapathi.uit@gmail.com</email>; Hitesh Panchal. Email: <email>engineerhitesh2000@gmail.com</email></corresp>
</author-notes>
<pub-date date-type="collection" publication-format="electronic">
<year>2024</year></pub-date>
<pub-date date-type="pub" publication-format="electronic"><day>15</day>
<month>5</month>
<year>2024</year></pub-date>
<volume>79</volume>
<issue>2</issue>
<fpage>1875</fpage>
<lpage>1901</lpage>
<history>
<date date-type="received">
<day>23</day>
<month>11</month>
<year>2023</year>
</date>
<date date-type="accepted">
<day>08</day>
<month>3</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2024 Umapathi et al.</copyright-statement>
<copyright-year>2024</copyright-year>
<copyright-holder>Umapathi et al.</copyright-holder>
<license xlink:href="https://creativecommons.org/licenses/by/4.0/">
<license-p>This work is licensed under a <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="TSP_CMC_47961.pdf"></self-uri>
<abstract>
<p>Breast cancer detection heavily relies on medical imaging, particularly ultrasound, for early diagnosis and effective treatment. This research addresses the challenges associated with computer-aided diagnosis (CAD) of breast cancer from ultrasound images. The primary challenge is accurately distinguishing between malignant and benign tumors, complicated by factors such as speckle noise, variable image quality, and the need for precise segmentation and classification. The main objective of the research paper is to develop an advanced methodology for breast ultrasound image classification, focusing on speckle noise reduction, precise segmentation, feature extraction, and machine learning-based classification. A unique approach is introduced that combines Enhanced Speckle Reduced Anisotropic Diffusion (SRAD) filters for speckle noise reduction, U-NET-based segmentation, Genetic Algorithm (GA)-based feature selection, and Random Forest and Bagging Tree classifiers, resulting in a novel and efficient model. To test and validate the hybrid model, rigorous experiments were performed, and results state that the proposed hybrid model achieved an accuracy rate of 99.9%, outperforming other existing techniques, and also significantly reducing computational time. This enhanced accuracy, along with improved sensitivity and specificity, makes the proposed hybrid model a valuable addition to CAD systems in breast cancer diagnosis, ultimately enhancing diagnostic accuracy in clinical applications.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>Ultrasound images</kwd>
<kwd>breast cancer</kwd>
<kwd>tumor classification</kwd>
<kwd>segmentation</kwd>
<kwd>deep learning</kwd>
<kwd>lesion detection</kwd>
</kwd-group>
<funding-group>
<award-group id="awg1">
<funding-source>King Saud University</funding-source>
<award-id>RSPD2024R996</award-id>
</award-group>
</funding-group>
</article-meta>
</front>
<body>
<sec id="s1">
<label>1</label>
<title>Introduction</title>
<p>As per World Health Organization (WHO), in the year 2020, there were about 2.3 million diagnosed breast cancer cases and 685,000 died of breast cancer worldwide. It constitutes about 12% of all cancer cases and 25% of cancer among women. In addition, the breast cancer mortality rate dropped by 40% between 1980 and 2020 in high-income countries especially. There are about 4 million breast cancer survivors in the world. The primary reason behind mortality reduction and survival is attributed to the early diagnosis of the disease supported by improved awareness among the population. The prima facie requirements given for tackling breast cancer are also the same: (i) creating awareness and (ii) facilitating early diagnosis. Early diagnosis could result in early interventions and medications that could prevent the spread of malignancy, hugely reducing the mortality risk factor. The risks associated with the malignancy being stated, the importance of diagnostic tools and their development are in the limelight. Though mammogram is a gold standard diagnostic tool, ultrasound imaging is a promising tool due to its effectiveness in diagnosing the disease stages from dense tissues easily. Thus, research that concentrates on improving ultrasound diagnosing efficiency is always gaining significance.</p>
<p>The paramount importance of early cancer detection and diagnosis cannot be overstated, especially when considering breast cancer, which stands as the most prevalent malignancy among women [<xref ref-type="bibr" rid="ref-1">1</xref>]. Alarmingly, 60% of cancer cases are diagnosed in advanced stages [<xref ref-type="bibr" rid="ref-2">2</xref>], highlighting the urgency of improving diagnostic methodologies. Current statistics reveal that about 2.26 million breast cancer cases are newly registered every year [<xref ref-type="bibr" rid="ref-3">3</xref>], further underscoring the pressing need for enhanced detection strategies for the early diagnosis of breast cancer.</p>
<sec id="s1_1">
<label>1.1</label>
<title>Objectives of the Paper</title>
<p>The objectives of the paper are:
<list list-type="order">
<list-item>
<p>To conduct a comprehensive study and provide an enlightened literature review with regard to the state of the art in artificial intelligence powered Ultrasound imaging based breast cancer screening.</p></list-item>
<list-item>
<p>To propose a novel methodology for the effective classification of breast tumor lesions from Ultrasound images that could aid in early diagnosis and thus treatment. A novel image processing pipeline with excellent pre-processing of US images by tailored eSRAD filtering followed by accurate segmentation of tumor lesions by U-NET DNN and feature selection by GA facilitated early classification of breast cancer stages with highest accuracy. The novelty of the model lies in the combination of techniques selected for improving performance in each stage of image processing which collectively resulted in an improved accuracy of classification.</p></list-item>
<list-item>
<p>To test and validate the proposed methodology in terms of the performance metrics: accuracy, sensitivity, specificity, precision, F-measure, Jaccard index, Dice coefficient, Matthews Correlation Coefficient and Area Under the Receiver Operating Characteristic (ROC) Curve (AUC).</p></list-item>
<list-item>
<p>To compare the proposed methodology with existing techniques like Random Forest, U-NET, Selective Kernel U-NET and GLCM based classification models.</p></list-item>
</list></p>
</sec>
<sec id="s1_2">
<label>1.2</label>
<title>Organization of Paper</title>
<p>The rest of the paper is organized as follows: <xref ref-type="sec" rid="s2">Section 2</xref> presents related works with regard to breast cancer, encompassing the contributions made by diverse researchers across the world for improving diagnostics in breast cancer. <xref ref-type="sec" rid="s3">Section 3</xref> discusses Materials and Methods. <xref ref-type="sec" rid="s4">Section 4</xref> highlights the proposed methodology. <xref ref-type="sec" rid="s5">Section 5</xref> focuses on Experimentations, Results and Analysis. And, finally, <xref ref-type="sec" rid="s6">Section 6</xref> concludes the paper with future scope.</p>
</sec>
</sec>
<sec id="s2">
<label>2</label>
<title>Related Work</title>
<p>While mammography remains a stalwart in breast cancer screening, ultrasound (US) imaging offers a cost-effective and easily accessible alternative [<xref ref-type="bibr" rid="ref-3">3</xref>]. Beyond affordability and accessibility, ultrasound&#x2019;s distinct advantage lies in its capacity to unveil the intricacies of dense breast tissue, a feat often elusive to mammography [<xref ref-type="bibr" rid="ref-4">4</xref>]. Breast tumor detection and classification in ultrasound imaging have been the focus of recent research efforts. Breast cancer is a significant health concern, and the accurate detection and classification of breast tumors are crucial for effective clinical management. Ultrasound imaging has emerged as a valuable tool for breast tumor detection, and recent research has focused on enhancing the accuracy of this modality through advanced image processing and classification techniques. Various novel approaches had been proposed to enhance the precision of breast ultrasound image analysis for identifying tissues, organs, and lesions. These approaches include the use of deep learning techniques such as convolutional neural networks (CNN), global average pooling (GAP)-guided attention loss function, generative adversarial networks (GAN), transfer learning (TL), and ensemble deep-learning-enabled clinical decision support systems.</p>
<p>A dual-input CNN with GAP-guided attention loss function was proposed for improved breast ultrasound tumor classification by Zou et al. [<xref ref-type="bibr" rid="ref-5">5</xref>], whereas a framework for breast mass classification from ultrasound images, incorporating GAN-based data augmentation and TL-based feature extraction was proposed by Chaudhary et al. [<xref ref-type="bibr" rid="ref-6">6</xref>]. Additionally, an ensemble deep-learning-enabled clinical decision support system was developed for breast cancer diagnosis and classification using ultrasound images by Ragab et al. [<xref ref-type="bibr" rid="ref-7">7</xref>]. Furthermore, the use of hybrid approaches, such as the CNN-Inception-V4-based hybrid approach for classifying breast cancer in mammogram images, was explored by Nazir et al. [<xref ref-type="bibr" rid="ref-8">8</xref>]. Several studies by diverse researchers had explored the application of deep learning algorithms for automatic breast tumor detection and classification using ultrasound images [<xref ref-type="bibr" rid="ref-9">9</xref>&#x2013;<xref ref-type="bibr" rid="ref-11">11</xref>]. These studies had demonstrated the potential of deep learning techniques in improving the efficiency and accuracy of breast tumor diagnosis. Additionally, the development of automated systems for breast tumor detection and classification, including automatic tumor volume estimation, using deep learning techniques had been a focus of research. Furthermore, the combination of different imaging modalities, such as photoacoustic tomography and ultrasound, had shown promise in improving the detection of breast tumors [<xref ref-type="bibr" rid="ref-12">12</xref>]. Moreover, the integration of microwave imaging techniques had also been explored for breast cancer detection, highlighting the diverse range of imaging modalities being investigated for this application [<xref ref-type="bibr" rid="ref-13">13</xref>].</p>
<p>Moreover, the importance of early diagnosis in improving treatment outcomes for breast cancer patients had been emphasized by Anupama et al. [<xref ref-type="bibr" rid="ref-14">14</xref>] and this in turn required tailored processing steps in each stage of image processing. As a result, research efforts had been directed towards developing efficient pre-processing techniques and segmentation algorithms to classify lesions within breast cancer mammograms and ultrasound images [<xref ref-type="bibr" rid="ref-14">14</xref>,<xref ref-type="bibr" rid="ref-15">15</xref>]. These techniques aim to address challenges such as speckle noise and accurate segmentation of small tumors in ultrasound images. Bilateral filters [<xref ref-type="bibr" rid="ref-16">16</xref>], Speckle Reducing Anisotropic Diffusion filters [<xref ref-type="bibr" rid="ref-17">17</xref>], were introduced in the image pre-processing stages for the efficient removal of speckle noise. Additionally, the use of advanced image processing methods, such as speckle noise reduction algorithms, had been proposed to enhance the quality of ultrasound images for more accurate tumor detection [<xref ref-type="bibr" rid="ref-18">18</xref>]. A detailed case study of the importance of data augmentation in medical image processing was presented by Wulff et al. [<xref ref-type="bibr" rid="ref-19">19</xref>].</p>
<p>The efficiency of the segmentation stage is governed by the clear definition of linear boundaries, and avoiding the overprediction of certain classes. Segmentation of ultrasound images was effectively carried out using Active Contours [<xref ref-type="bibr" rid="ref-20">20</xref>] and enhanced level set Active Contours [<xref ref-type="bibr" rid="ref-21">21</xref>]. A dual-attention network-based concurrent segmentation method was introduced by Iqbal et al. [<xref ref-type="bibr" rid="ref-22">22</xref>] that improved the accuracy of lesion segmentation. A Deep learning approach with U-NET segmentation was presented by Tarighat [<xref ref-type="bibr" rid="ref-23">23</xref>] and Zhao et al. [<xref ref-type="bibr" rid="ref-24">24</xref>]. U-NET architectures had demonstrated effectiveness in enhancing segmentation quality, especially for varying-size objects, and had outperformed baseline models across different datasets and imaging modalities [<xref ref-type="bibr" rid="ref-25">25</xref>&#x2013;<xref ref-type="bibr" rid="ref-28">28</xref>]. A combination of traditional methods in the early image processing stages and then using machine learning-based classification was experimentally found effective by many researchers. Convolutional Neural Network (CNN) with Active Contours [<xref ref-type="bibr" rid="ref-29">29</xref>], Machine Learning (ML), and Genetic Optimization [<xref ref-type="bibr" rid="ref-30">30</xref>] were studied.</p>
<p>As a step above the detection and segmentation of lesions, the classification of tumors [<xref ref-type="bibr" rid="ref-31">31</xref>] and hence paving the way towards fully automated cancer diagnosis systems [<xref ref-type="bibr" rid="ref-32">32</xref>] is possible with the help of Artificial Intelligence (AI) techniques and is the topic of recent research. Optimization of AI-based techniques [<xref ref-type="bibr" rid="ref-33">33</xref>] is on the verge of development. A varied number of existing deep learning architectures were used in lesion detection from breast ultrasound (BUS) images [<xref ref-type="bibr" rid="ref-34">34</xref>]. Results could reveal that Convolutional Neural Network performs well in the detection and classification tasks [<xref ref-type="bibr" rid="ref-35">35</xref>]. A Deep neural network (DNN) model BUSnet that used an unsupervised bounding box regression algorithm was introduced by Li et al. [<xref ref-type="bibr" rid="ref-36">36</xref>]. A fusion of Reformed Differential Evaluation (RDE) techniques along with the Reformed Gray Wolf (RGW) optimization algorithm was used for the feature selection in the classification of the BUS dataset [<xref ref-type="bibr" rid="ref-37">37</xref>]. Detection and classification of the BUS dataset based on the TV model and GoogLeNet model was proposed by Chen et al. [<xref ref-type="bibr" rid="ref-38">38</xref>]. Random forest-based classification was found effective in classifying BUS images [<xref ref-type="bibr" rid="ref-39">39</xref>]. A modified Random Forest classifier enhanced by a GridSearchCV was proposed by Li et al. [<xref ref-type="bibr" rid="ref-40">40</xref>].</p>
<p>Challenges such as identifying smaller lesions from noisy regions in (automated breast ultrasound) ABUS images were addressed with a stereoscopic attention network (SA-Net) [<xref ref-type="bibr" rid="ref-41">41</xref>]. The multi-view SA-Net unit uses a split output design to construct the 3D localization tensor and classification was done based on two features stereoscopic view and plane view to achieve excellent accuracy in the classification of ABUS images. Transfer learning based on combining different features is used effectively in prediction tasks [<xref ref-type="bibr" rid="ref-42">42</xref>]. While ultrasound imaging has shown promise in breast tumor detection, challenges such as differentiating between benign and malignant tumors persist. Some studies highlighted the limitations of current imaging modalities in accurately distinguishing between benign and malignant tumors [<xref ref-type="bibr" rid="ref-43">43</xref>]. This underscores the ongoing need for advanced diagnostic tools and techniques to address these challenges. While significant progress has been made, further research is needed to address the remaining challenges and advance the state-of-the-art in breast tumor detection and classification.</p>
<p>Major challenges associated with processing Ultrasound images for the classification of breast cancer are: (i) the presence of speckle noise, which reduces the image&#x2019;s contrast and resolution, (ii) the lack of well-annotated datasets, which makes machine learning difficult, (iii) overprediction of certain classes in segmentation, which affects classification accuracy, and (iv) selecting the most appropriate features that could reduce overtraining and thus produce good classification accuracy. The proposed hybrid model addresses these challenges to achieve good classification accuracy.</p>
</sec>
<sec id="s3">
<label>3</label>
<title>Materials and Methods</title>
<sec id="s3_1">
<label>3.1</label>
<title>Materials</title>
<sec id="s3_1_1">
<label>3.1.1</label>
<title>Dataset</title>
<p>The fundamental cornerstone of this research lies in the utilization of the Breast Ultrasound Images (BUSI) Database, a meticulously curated collection accessible at <ext-link ext-link-type="uri" xlink:href="https://scholar.cu.edu.eg/Dataset_BUSI.zip">https://scholar.cu.edu.eg/Dataset_BUSI.zip</ext-link>. This repository comprises a diverse array of breast ultrasound images, meticulously selected and annotated for research purposes. The dataset encompasses 780 images, consisting of 133 normal (N), 437 benign (B), and 210 malignant (M) images.</p>
<p>Each image within the BUSI Database is endowed with crucial attributes vital for tumor classification, including patient information, lesion type (benign or malignant), image quality metrics, and pertinent clinical metadata. For this research, a subset of 160 images was strategically employed for both training (100 images) and testing (60 images). The training dataset comprises 30 normal (N), 30 malignant (M), and 40 benign (B) images, while the testing dataset encompasses 20 images from each class.</p>
<p>The significance of this dataset lies in its representation of diverse breast tissue conditions, enabling comprehensive training and evaluation of the classification models. <xref ref-type="fig" rid="fig-1">Fig. 1</xref> visually represents samples from the dataset across various classes. <xref ref-type="fig" rid="fig-1">Fig. 1</xref> represents the sample images depicting different classes within the dataset: Normal, Benign, and Malignant.</p>
<fig id="fig-1">
<label>Figure 1</label>
<caption>
<title>Sample images from the input dataset revealing the different classes of lesions</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-1.tif"/>
</fig>
</sec>
<sec id="s3_1_2">
<label>3.1.2</label>
<title>Data Pre-Processing</title>
<p>Before any analytical procedures, a rigorous pre-processing phase was undertaken to elevate the dataset&#x2019;s quality and consistency. Speckle noise, a common challenge in ultrasound image processing, was specifically addressed through various techniques:
<list list-type="bullet">
<list-item>
<p>Noise Reduction: The focal point of this phase was the reduction of speckle noise, achieved through an Enhanced Speckle-Reducing Anisotropic Diffusion technique. This approach adeptly eliminated speckle noise while preserving critical image details, thus enhancing image clarity.</p></list-item>
<list-item>
<p>Image Enhancement: Additional techniques like contrast adjustment and brightness normalization were meticulously applied to augment image quality and ensure uniform illumination across all images.</p></list-item>
<list-item>
<p>Normalization: Intensity normalization was meticulously performed to standardize pixel values, mitigating variations attributed to diverse image acquisition settings.</p></list-item>
<list-item>
<p>Data Augmentation: The augmentation process encompassed a spectrum of techniques including rotation, scaling, flips, and various geometric transformations. Iteratively, these operations augmented the original dataset, diversifying and enriching it with additional samples. This augmentation strategy is pivotal, particularly in scenarios where limited data availability poses a challenge in training robust deep-learning models.</p></list-item>
</list></p>
<sec id="s3_1_2_1">
<title>Enhanced SRAD for Noise Reduction</title>
<p>The Enhanced SRAD technique, an integral facet of the pre-processing stage, merits specific attention due to its role in effectively reducing speckle noise in ultrasound images. This subsection elaborates on the theoretical underpinnings of Enhanced SRAD, delineating its mathematical models for noise reduction.</p>
<p>Conventional Anisotropic Diffusion (AD) effectively eliminates additive noise but tends to compromise edge information, especially when confronted with speckle noise. To overcome this limitation, the Enhanced SRAD technique incorporates the Instantaneous Coefficient of Variation (ICOV) to selectively reduce speckle noise while preserving edges.</p>
<p>The SRAD process operates via partial differential equations, as delineated by the <xref ref-type="disp-formula" rid="eqn-1">Eqs. (1)</xref> through <xref ref-type="disp-formula" rid="eqn-10">(10)</xref> provided below. These equations encapsulate the diffusion coefficient, threshold for diffusion, and the mathematical representation of the ICOV-based diffusion.</p>
</sec>
<sec id="s3_1_2_2">
<title>Mathematical Modeling</title>
<p>Let I(a, b) be the image intensity matrix without zeros and have finite power in the two-dimensional coordinate grid (&#x2126;), a partial differential equation model of SRAD will produce an output I(a, b, t).
<disp-formula id="eqn-1"><label>(1)</label><mml:math id="mml-eqn-1" display="block"><mml:mrow><mml:mtext>I</mml:mtext></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>a</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mtext>b</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mn>0</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:msub><mml:mi>I</mml:mi><mml:mrow><mml:mi>o</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>a</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:mi>I</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>;</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:munder><mml:mo stretchy="false">&#x2192;</mml:mo><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:munder></mml:mfrac><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:mi mathvariant="normal">&#x03A9;</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:math></disp-formula>
<disp-formula id="eqn-2"><label>(2)</label><mml:math id="mml-eqn-2" display="block"><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:mrow><mml:mtext>I</mml:mtext></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mfrac><mml:mrow><mml:mrow><mml:mtext>a</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mtext>b</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mtext>t</mml:mtext></mml:mrow></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:mrow><mml:mtext>t</mml:mtext></mml:mrow></mml:mrow></mml:mfrac><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>d</mml:mi><mml:mi>i</mml:mi><mml:mi>v</mml:mi><mml:mrow><mml:mo>[</mml:mo><mml:mi>c</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>f</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mi mathvariant="normal">&#x2207;</mml:mi><mml:mi>I</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>a</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>;</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:math></disp-formula>
<disp-formula id="eqn-3"><label>(3)</label><mml:math id="mml-eqn-3" display="block"><mml:mrow><mml:mtext>c</mml:mtext></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>f</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mn>1</mml:mn><mml:mo>+</mml:mo><mml:mrow><mml:mo>&#x230A;</mml:mo><mml:msup><mml:mi>f</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>a</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:msubsup><mml:mi>f</mml:mi><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mo stretchy="false">(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x230B;</mml:mo></mml:mrow><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:mfrac><mml:mspace width="1em" /><mml:mrow><mml:mtext>or</mml:mtext></mml:mrow><mml:mspace width="thinmathspace" /><mml:mrow><mml:mtext>c</mml:mtext></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>f</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>exp</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mo>&#x2212;</mml:mo><mml:mfrac><mml:mrow><mml:mo>[</mml:mo><mml:msup><mml:mi>f</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>a</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:msubsup><mml:mi>f</mml:mi><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>]</mml:mo></mml:mrow><mml:mi>T</mml:mi></mml:mfrac><mml:mo>}</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>where, I(a, b) represents the image intensity matrix, div denotes divergence, &#x2207; is the gradient, c(f) is the diffusion coefficient, and T is the threshold. The characteristic of this diffusion coefficient is that it is more pronounced in homogenous regions and restricted around the image boundaries. Where,
<disp-formula id="eqn-4"><label>(4)</label><mml:math id="mml-eqn-4" display="block"><mml:mrow><mml:mtext>f</mml:mtext></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>a</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mtext>b</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mtext>t</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:msqrt><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mn>1</mml:mn><mml:mn>2</mml:mn></mml:mfrac></mml:mstyle><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x2207;</mml:mi><mml:mi>I</mml:mi></mml:mrow><mml:mi>I</mml:mi></mml:mfrac></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mn>1</mml:mn><mml:msup><mml:mn>4</mml:mn><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mfrac></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:msup><mml:mi mathvariant="normal">&#x2207;</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mi>I</mml:mi></mml:mrow><mml:mi>I</mml:mi></mml:mfrac></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:msup><mml:mrow><mml:mo>[</mml:mo><mml:mn>1</mml:mn><mml:mo>+</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mn>1</mml:mn><mml:mn>4</mml:mn></mml:mfrac></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:msup><mml:mi 
mathvariant="normal">&#x2207;</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mi>I</mml:mi></mml:mrow><mml:mi>I</mml:mi></mml:mfrac></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:mo>]</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mfrac></mml:mstyle></mml:msqrt></mml:math></disp-formula>
<disp-formula id="eqn-5"><label>(5)</label><mml:math id="mml-eqn-5" display="block"><mml:msub><mml:mrow><mml:mtext>f</mml:mtext></mml:mrow><mml:mrow><mml:mrow><mml:mtext>o</mml:mtext></mml:mrow></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>t</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mfrac><mml:msqrt><mml:mi>v</mml:mi><mml:mi>a</mml:mi><mml:mi>r</mml:mi><mml:mo stretchy="false">[</mml:mo><mml:mi>z</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">]</mml:mo></mml:msqrt><mml:mrow><mml:mi>z</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>t</mml:mi><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mo>&#x2032;</mml:mo></mml:mrow></mml:msup></mml:mrow></mml:mfrac></mml:math></disp-formula>
<disp-formula id="eqn-6"><label>(6)</label><mml:math id="mml-eqn-6" display="block"><mml:mrow><mml:mtext>T</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:msubsup><mml:mi>f</mml:mi><mml:mrow><mml:mi>o</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mn>1</mml:mn><mml:mo>+</mml:mo><mml:msubsup><mml:mi>f</mml:mi><mml:mrow><mml:mi>o</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mo stretchy="false">(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p>Here f(a, b, t) is the instantaneous variation coefficient (ICOV) that is introduced to identify the edges in the image as it is more pronounced near the edges and less pronounced in the homogeneous regions. And <inline-formula id="ieqn-1"><mml:math id="mml-ieqn-1"><mml:msup><mml:mi mathvariant="normal">&#x2207;</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:math></inline-formula> is the Laplacian operator, T is the threshold of diffusion, <inline-formula id="ieqn-2"><mml:math id="mml-ieqn-2"><mml:msub><mml:mi>f</mml:mi><mml:mrow><mml:mi>o</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> is the coefficient of variation at t &#x003D; 0, <inline-formula id="ieqn-3"><mml:math id="mml-ieqn-3"><mml:mi>z</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>t</mml:mi><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mo>&#x2032;</mml:mo></mml:mrow></mml:msup></mml:math></inline-formula> is the mean and <inline-formula id="ieqn-4"><mml:math id="mml-ieqn-4"><mml:mi>v</mml:mi><mml:mi>a</mml:mi><mml:mi>r</mml:mi><mml:mo stretchy="false">[</mml:mo><mml:mi>z</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">]</mml:mo></mml:math></inline-formula> is the variance of the intensity function.</p>
<p>When <inline-formula id="ieqn-5"><mml:math id="mml-ieqn-5"><mml:msup><mml:mi>f</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>a</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:msubsup><mml:mi>f</mml:mi><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> is greater than T, c(f) &#x2192; zero, and diffusion stops (near edges).</p>
<p>When <inline-formula id="ieqn-6"><mml:math id="mml-ieqn-6"><mml:msup><mml:mi>f</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext>a</mml:mtext></mml:mrow><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:msubsup><mml:mi>f</mml:mi><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mrow><mml:mo>(</mml:mo><mml:mi>t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> is less than T, c(f) &#x2192; 1, diffusion function acts as a filter (homogeneous region).</p>
<p>As T, the threshold is the determining factor of diffusion; T determines the amount of speckle reduction and edge information preservation. Without using log compression, the SRAD filtering approach may process data immediately at the same time retaining the information content of the image.</p>
<p>The ICOV-based diffusion selectively acts as a filter during image pre-processing, enhancing images for subsequent machine learning-based segmentation. The quality of the image pre-processing step is verified by measurements such as SNR (Signal to Noise Ratio), Peak SNR (PSNR) that reveal the noise reduction capability, Mean Square Error (MSE) that shows the error, Structural Similarity Index Measure (SSIM) and the formula used for calculating the same are given by <xref ref-type="disp-formula" rid="eqn-7">Eqs. (7)</xref> through <xref ref-type="disp-formula" rid="eqn-10">(10)</xref> below:
<disp-formula id="eqn-7"><label>(7)</label><mml:math id="mml-eqn-7" display="block"><mml:mrow><mml:mtext>SNR</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mn>10</mml:mn><mml:mspace width="thinmathspace" /><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:msub><mml:mi>g</mml:mi><mml:mrow><mml:mn>10</mml:mn></mml:mrow></mml:msub><mml:mrow><mml:mo>[</mml:mo><mml:mfrac><mml:mrow><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mrow><mml:mi>a</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mo stretchy="false">[</mml:mo><mml:mi>r</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:msup><mml:mo stretchy="false">]</mml:mo><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mrow><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mrow><mml:mi>a</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mo stretchy="false">[</mml:mo><mml:mi>r</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:msup><mml:mo 
stretchy="false">]</mml:mo><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:mfrac><mml:mo>]</mml:mo></mml:mrow></mml:math></disp-formula>
<disp-formula id="eqn-8"><label>(8)</label><mml:math id="mml-eqn-8" display="block"><mml:mrow><mml:mtext>PSNR</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mn>10</mml:mn><mml:mspace width="thinmathspace" /><mml:mi>l</mml:mi><mml:mi>o</mml:mi><mml:msub><mml:mi>g</mml:mi><mml:mrow><mml:mn>10</mml:mn></mml:mrow></mml:msub><mml:mrow><mml:mo>[</mml:mo><mml:mfrac><mml:mrow><mml:mo movablelimits="true" form="prefix">max</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi>r</mml:mi><mml:mo>,</mml:mo><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mrow><mml:mi>a</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>n</mml:mi><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mfrac></mml:mstyle><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mrow><mml:mi>a</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:msub><mml:mi>n</mml:mi><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:msup><mml:mrow><mml:mo>[</mml:mo><mml:mi>r</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>t</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>]</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:mfrac><mml:mo>]</mml:mo></mml:mrow></mml:math></disp-formula>
<disp-formula id="eqn-9"><label>(9)</label><mml:math id="mml-eqn-9" display="block"><mml:mrow><mml:mtext>MSE</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>n</mml:mi></mml:mfrac><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:munderover><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mi>Y</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msubsup><mml:mi>Y</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mo>&#x2032;</mml:mo></mml:mrow></mml:msubsup></mml:mrow><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:math></disp-formula>
<disp-formula id="eqn-10"><label>(10)</label><mml:math id="mml-eqn-10" display="block"><mml:mrow><mml:mtext>SSIM</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mn>2</mml:mn><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mrow><mml:mi>a</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mn>2</mml:mn><mml:msub><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mi>a</mml:mi><mml:mi>b</mml:mi></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:msubsup><mml:mi>&#x03BC;</mml:mi><mml:mrow><mml:mi>a</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:msubsup><mml:mi>&#x03BC;</mml:mi><mml:mrow><mml:mi>b</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:msubsup><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mi>a</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:msubsup><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mi>b</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mfrac></mml:math></disp-formula></p>
<p>Mathematical models govern the diffusion process, wherein a threshold parameter (T) plays a pivotal role in determining the balance between speckle reduction and edge information preservation. A meticulous evaluation of the pre-processing quality ensued, as showcased by the quantitative metrics in <xref ref-type="table" rid="table-1">Table 1</xref> and visualized in <xref ref-type="fig" rid="fig-2">Fig. 2</xref>.</p>
<table-wrap id="table-1">
<label>Table 1</label>
<caption>
<title>Pre-processing quality evaluation</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th rowspan="2">Pre-processing technique</th>
<th align="center" colspan="4">Image-1</th>
<th align="center" colspan="4">Image-2</th>
</tr>
<tr>
<th>Signal to<break/>noise ratio<break/>(dB)</th>
<th>Peak<break/>SNR<break/>(dB)</th>
<th>Mean square error</th>
<th>Similarity index</th>
<th>Signal to<break/>noise ratio<break/>(dB)</th>
<th>Peak<break/>SNR<break/>(dB)</th>
<th>Mean square error</th>
<th>Similarity index</th>
</tr>
</thead>
<tbody>
<tr>
<td>SRAD</td>
<td>30.2365</td>
<td>32.4008</td>
<td>0.0012</td>
<td>0.9874</td>
<td>28.4837</td>
<td>35.1211</td>
<td>0.0013</td>
<td>0.9745</td>
</tr>
<tr>
<td>eSRAD</td>
<td>33.2315</td>
<td>35.2315</td>
<td>0.0003</td>
<td>0.9987</td>
<td>30.3646</td>
<td>38.0021</td>
<td>0.0001</td>
<td>0.9996</td>
</tr>
</tbody>
</table>
</table-wrap><fig id="fig-2">
<label>Figure 2</label>
<caption>
<title>Sample input images (a &#x0026; d), Noise removed output images (b, c, e &#x0026; f)</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-2.tif"/>
</fig>
</sec>
</sec>
<sec id="s3_1_3">
<label>3.1.3</label>
<title>Data Augmentation</title>
<p>The effective prediction and classification accuracy of a Deep Learning architecture fundamentally hinges upon the quantity and diversity of data samples used for training. In medical image processing scenarios where data scarcity prevails, data augmentation plays a pivotal role in enriching the training dataset. In our research, data augmentation strategies encompassed an array of operations including translation, rotation, gray value variation, and elastic deformations. These operations iteratively augmented the original dataset, ultimately diversifying it to accommodate an extensive sample set. This augmentation process significantly expanded the dataset to a total of 9000 samples, ensuring a more comprehensive and diverse representation across normal, malignant, and abnormal images.</p>
</sec>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Methods</title>
<sec id="s3_2_1">
<label>3.2.1</label>
<title>U-NET-Based Image Segmentation</title>
<p>Segmentation plays an essential role in both image processing and pattern recognition. It facilitates the identification and separation of abnormalities for improved diagnosis. In our Computer-Aided Diagnosis (CAD) system, we employ segmentation to delineate Regions of Interest (ROIs). U-NET-based architecture dependent entirely on the convolution network is found to enhance the segmentation of ultrasound images effectively. Whereas, an Active Contour (snake model) is also a commonly used method for ROI separation. In many medical applications, active contour is widely adopted and hence a comparison between the active contour and U-NET architecture is carried out. U-NET is chosen for the framework and a tailored U-NET architecture is constructed for image segmentation. To achieve an optimal performance in time and complexity, a novel pre-processing is done and a tailored 5-stage U-NET is used for segmenting the pre-processed and augmented dataset. The equations that define the U-NET working include its activation function and its energy function. The U-NET energy function is given by
<disp-formula id="eqn-11"><label>(11)</label><mml:math id="mml-eqn-11" display="block"><mml:mrow><mml:mtext>E</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mo>&#x2211;</mml:mo><mml:mi>w</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mi>log</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>p</mml:mi><mml:msub><mml:mi>k</mml:mi><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p>where, this summation is taken over <inline-formula id="ieqn-7"><mml:math id="mml-ieqn-7"><mml:mi>x</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi mathvariant="normal">&#x03A9;</mml:mi></mml:math></inline-formula>, k<sub>x</sub> is the pixel label and w is the weight map. Here, the activation function is SoftMax and is applied on a pixel-wise basis. This is defined as below:
<disp-formula id="eqn-12"><label>(12)</label><mml:math id="mml-eqn-12" display="block"><mml:msub><mml:mrow><mml:mtext>p</mml:mtext></mml:mrow><mml:mrow><mml:mrow><mml:mtext>k</mml:mtext></mml:mrow></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mi>exp</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:msup><mml:mi>k</mml:mi><mml:mrow><mml:mo>&#x2032;</mml:mo></mml:mrow></mml:msup><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msubsup><mml:mi>exp</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msub><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mo>&#x2032;</mml:mo></mml:mrow></mml:msup><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p>where, P<sub>k</sub> &#x003D; approximation of maximum function, a<sub>k</sub>(x) is an activation in feature channel k at pixel position x, and k is the number of classes.</p>
<p>The network learns the distinct borders between cells by making use of morphological operations. The weight map is given by
<disp-formula id="eqn-13"><label>(13)</label><mml:math id="mml-eqn-13" display="block"><mml:mi>w</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:msub><mml:mi>w</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>+</mml:mo><mml:msub><mml:mi>w</mml:mi><mml:mrow><mml:mn>0</mml:mn></mml:mrow></mml:msub><mml:mi>exp</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mo>&#x2212;</mml:mo><mml:mfrac><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mi>d</mml:mi><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>+</mml:mo><mml:mrow><mml:msub><mml:mi>d</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mrow><mml:mn>2</mml:mn><mml:msup><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:mfrac><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>where, w<sub>c</sub> is the weight map to balance class frequencies, d<sub>1</sub> is the distance to the border of the nearest cell, d<sub>2</sub> is the distance to the border of the second nearest cell. Initial weights are drawn from a Gaussian distribution with standard deviation <inline-formula id="ieqn-8"><mml:math id="mml-ieqn-8"><mml:msqrt><mml:mn>2</mml:mn><mml:mrow><mml:mo>/</mml:mo></mml:mrow><mml:mi>N</mml:mi></mml:msqrt></mml:math></inline-formula> where N is the number of inbound nodes of one neuron.</p>
<sec id="s3_2_1_1">
<title>U-NET Architecture</title>
<p>The pre-processed image is segmented in the next step and U-NET segmentation is governed by the energy function defined by <xref ref-type="disp-formula" rid="eqn-11">Eq. (11)</xref>. Softmax activation is used for U-NET and the segmented output obtained is used for classification of tumor lesions. <xref ref-type="fig" rid="fig-3">Fig. 3</xref> presents the architecture of U-NET.</p>
<fig id="fig-3">
<label>Figure 3</label>
<caption>
<title>Architecture of U-NET</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-3.tif"/>
</fig>
<p>The U-NET architecture presents a reliable and quick network for segmenting ultrasound images. It comprises three sections: Contraction, Bottleneck, and Expansion. The Contraction phase consists of CNN layers, Rectified Linear Unit (ReLU) layer, and down-sampling max-pooling layers. The Bottleneck layer mediates between Contraction and Expansion, while the Expansion section consists of CNN layers, ReLU layer, and up-sampling layers. The combination of these operations results in the U-shaped network architecture called the U-NET.</p>
<p>Inherent to the U-NET architecture are the contracting or down-sampling stages constructed from 2D convolution layers with fixed kernels and doubling channels or feature maps, followed by a tracking unit to converge cost functions faster and a max-pooling layer to divide the input at each stage. In our experiment, each of the contracting stages consists of two 2D convolution layers of kernel size 3 &#x00D7; 3 followed by a ReLU that thresholds the pixels less than zero and smoothens the image and a max-pooling layer that downsamples the image by a pool size of [5,5]. The first stage of the U-NET starts with a feature map of 32 which doubles in each stage up to 512 features. The image at the bottleneck is up-sampled by a transposed 2D convolution function of a 3 &#x00D7; 3 kernel and a feature map size of 256. Now, the expanding or up-sampling stages are constructed from two 2D CNNs with a kernel size of 3 &#x00D7; 3 followed by ReLU layers and a transposed 2D convolution layer of 3 &#x00D7; 3 kernel, feature map of size half that of the previous stage. The U-NET in total consists of 18 2D convolution layers, 4 transposed convolution layers, 18 ReLU layers, 4 max-pooling layers, one fully connected layer, and one soft-max layer. A pixel classification layer follows the soft-max to label each pixel for the segmentation step. The flowchart depicting the process involved with segmenting an image using the U-NET algorithm is given in <xref ref-type="fig" rid="fig-4">Fig. 4</xref>.</p>
<fig id="fig-4">
<label>Figure 4</label>
<caption>
<title>U-NET segmentation flowchart</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-4.tif"/>
</fig>
</sec>
<sec id="s3_2_1_2">
<title>Classification Stage</title>
<p>The efficiency of the classification stage is dependent on the effectiveness of feature extraction and selection. Twelve different combinations of classification experiments are conducted to determine the best-performing classification stage. The experiments are framed by using (i) Three different classifiers, namely Random Forest, Neural Network, and Bagging tree, (ii) Two feature selection algorithms, namely Genetic Algorithm and Leave One Out Cross-Validation (LOOCV) algorithm and (iii) Two feature extraction algorithms, namely Wavelet transforms and Gray Level Co-occurrence Matrix (GLCM). Each of them is explained below.</p>
</sec>
<sec id="s3_2_1_3">
<title>Feature Extraction</title>
<p>Texture feature extractions can be classified as statistical, structural, transform-based, model-based, graph-based, learning-based, and entropy-based approaches and in this experiment, the most frequently used transform-based approach, namely the multiscale wavelet transform, is compared with Gray Level Co-occurrence Matrix (GLCM) for texture feature extraction. Textural features are calculated using wavelet transforms and GLCM to gain insights into image content. Wavelet transforms and GLCM-based textural feature extractions were found to perform well in the extraction of features from ultrasound images and thus they are chosen for the extraction process.</p>
<p>Wavelet coefficients when employed for feature extraction from hyperspectral data involve calculating entropy, standard deviation, energy, waveform length, and variance from the image. GLCM serves as a fundamental tool for extracting second-order statistical texture features which include Mean, Contrast, Standard Deviation, Correlation, Energy, Homogeneity, Skewness, Kurtosis, and Entropy. The formulae for these features are provided in <xref ref-type="table" rid="table-2">Table 2</xref>.</p>
<table-wrap id="table-2">
<label>Table 2</label>
<caption>
<title>Features from GLCM</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Features from GLCM</th>
<th>Equations</th>
<th>Features from GLCM</th>
<th>Equations</th>
</tr>
</thead>
<tbody>
<tr>
<td>Mean</td>
<td><inline-formula id="ieqn-9"><mml:math id="mml-ieqn-9"><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mn>1</mml:mn><mml:mi>n</mml:mi></mml:mfrac></mml:mstyle><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>L</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mi>p</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula></td>
<td>Contrast</td>
<td><inline-formula id="ieqn-10"><mml:math id="mml-ieqn-10"><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>y</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:msup><mml:mrow><mml:mo>|</mml:mo><mml:mi>x</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>y</mml:mi><mml:mo>|</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mi>p</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula></td>
</tr>
<tr>
<td>Standard deviation</td>
<td><inline-formula id="ieqn-11"><mml:math id="mml-ieqn-11"><mml:msqrt><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mo>&#x2212;</mml:mo><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mo>&#x2032;</mml:mo></mml:mrow></mml:msup><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:mfrac></mml:mstyle></mml:msqrt></mml:math></inline-formula></td>
<td>Correlation</td>
<td><inline-formula id="ieqn-12"><mml:math id="mml-ieqn-12"><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>y</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>y</mml:mi><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:mi>p</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:msub><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td>Energy</td>
<td><inline-formula id="ieqn-13"><mml:math id="mml-ieqn-13"><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>y</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mi>p</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:math></inline-formula></td>
<td>Kurtosis</td>
<td><inline-formula id="ieqn-14"><mml:math id="mml-ieqn-14"><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mo>&#x2032;</mml:mo></mml:mrow></mml:msup><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mn>4</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:msup><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mn>4</mml:mn></mml:mrow></mml:msup></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td>Entropy</td>
<td><inline-formula id="ieqn-15"><mml:math id="mml-ieqn-15"><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>y</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mi>p</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mi>log</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>p</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula></td>
<td>Skewness</td>
<td><inline-formula id="ieqn-16"><mml:math id="mml-ieqn-16"><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mo>&#x2032;</mml:mo></mml:mrow></mml:msup><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mn>3</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:msup><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mn>3</mml:mn></mml:mrow></mml:msup></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td>Homogeneity</td>
<td><inline-formula id="ieqn-17"><mml:math id="mml-ieqn-17"><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>x</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:msubsup><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>y</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi>p</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn><mml:mo>+</mml:mo><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow><mml:mi>x</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>y</mml:mi><mml:mrow><mml:mo stretchy="false">|</mml:mo></mml:mrow></mml:mrow></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
<td></td>
<td></td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_2_1_4">
<title>Feature Selection</title>
<p>In this phase, we assess two commonly used feature selection methods&#x2014;Genetic Algorithm (GA) and Leave-One-Out Cross-Validation (LOOCV)&#x2014;to determine the most efficient approach.</p>
<p><bold>LOOCV Algorithm:</bold> LOOCV is a type of K-fold validation technique that estimates machine learning model performance. It involves leaving one observation out for validation while the rest serve as the training set. The model is then used to make predictions on the left-out observation, and the Mean Square Error (MSE) is computed. This process is repeated &#x2018;n&#x2019; times, as illustrated by <xref ref-type="disp-formula" rid="eqn-14">Eqs. (14)</xref> and <xref ref-type="disp-formula" rid="eqn-15">(15)</xref>. The advantage of using LOOCV is that it is highly deterministic and tends to produce accurate results.</p>
<p><bold>Genetic Algorithm:</bold> Genetic Algorithms (GA) are stochastic methods employed for optimizing machine learning-based systems. GA is particularly effective for feature selection in image processing, hyperparameter tuning in Artificial Neural Networks (ANNs), and pipeline optimization in machine learning. Here in our experiment the classification accuracy is used to select the most relevant features alone from the feature set and this is found to result in better accuracy. <xref ref-type="table" rid="table-3">Table 3</xref> presents the LOOCV and Genetic Algorithm based algorithms being used for feature selection.</p>
<table-wrap id="table-3">
<label>Table 3</label>
<caption>
<title>Algorithms used for feature selection</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Algorithm 1 (LOOCV algorithm)</th>
<th>Algorithm 2 (Genetic algorithm)</th>
</tr>
</thead>
<tbody>
<tr>
<td>1. Divide the dataset &#x2192; training and testing set</td>
<td>1. Create initial random population of n chromosomes, with a crossover percentage of 0.7 and a mutation rate of 0.1</td>
</tr>
<tr>
<td>2. Build model &#x2192; training dataset</td>
<td>2. Evaluate fitness function &#x2192; classification accuracy</td>
</tr>
<tr>
<td>3. Make Prediction &#x2192; testing dataset</td>
<td>3. Check exit criteria &#x2192; <inline-formula id="ieqn-18"><mml:math id="mml-ieqn-18"><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:msub><mml:mi>f</mml:mi><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:msub><mml:mi>f</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mfrac></mml:mstyle></mml:math></inline-formula>, where j &#x003D; 1,2,&#x2026;,N is the probability of fitness; if the exit criteria are satisfied, report the best solution. Else</td>
</tr>
<tr>
<td>4. Measure MSE &#x2192; <xref ref-type="disp-formula" rid="eqn-15">Eq. (15)</xref>. &#x201C;Test MSE&#x201D;</td>
<td>4. Selection of features from the feature set</td>
</tr>
<tr>
<td>5. Measure Model performance &#x2192; from MSE</td>
<td>5. Perform Crossover and obtain crossover offspring</td>
</tr>
<tr>
<td>6. Predict the response &#x2192; for the observation left out using the model</td>
<td>6. Perform mutation to obtain offspring</td>
</tr>
<tr>
<td>7. Repeat the process &#x2018;n&#x2019; times</td>
<td>7. Sort out the offspring and move to step 2</td>
</tr>
</tbody>
</table>
</table-wrap>
<p><italic>Mean Square Error</italic>
<disp-formula id="eqn-14"><label>(14)</label><mml:math id="mml-eqn-14" display="block"><mml:mrow><mml:mtext>MSE</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>n</mml:mi></mml:mfrac><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2217;</mml:mo><mml:mo>&#x2211;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mi>y</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:math></disp-formula>
<disp-formula id="eqn-15"><label>(15)</label><mml:math id="mml-eqn-15" display="block"><mml:mrow><mml:mtext>Test MSE</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>n</mml:mi></mml:mfrac><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2217;</mml:mo><mml:mo>&#x2211;</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:msub><mml:mi>E</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:math></disp-formula>
<disp-formula id="eqn-16"><label>(16)</label><mml:math id="mml-eqn-16" display="block"><mml:msub><mml:mi>f</mml:mi><mml:mrow><mml:mi>f</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>d</mml:mi></mml:mrow></mml:munderover><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mrow><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>m</mml:mi></mml:mrow></mml:munderover><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:munderover><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mfrac></mml:math></disp-formula></p>
</sec>
<sec id="s3_2_1_5">
<title>Classification Step</title>
<p>Three different models Random Forest, Bagging tree, and Neural Network classifiers are chosen for evaluating the best model for lesion classification. Their classification efficiencies are compared to select the best classifier for our model. The theoretical explanations, mathematical modeling, and algorithmic descriptions of the classifiers are discussed below.</p>
<p><bold>Random Forest (RF) Classifier:</bold> Random Forest (RF) is a versatile ML algorithm used for prediction and classification tasks. It builds decision trees from random samples and combines their results through majority voting or averaging. Capable of handling both continuous and discrete variables, random forest improves accuracy while reducing error rates when multiple decision trees are involved. The RF accuracy depends on the number of trees created in the forest and it uses batching and randomisation in constructing each tree which is prescribed by the Gini index given by <inline-formula id="ieqn-19"><mml:math id="mml-ieqn-19"><mml:msub><mml:mi>G</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:munderover><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:math></inline-formula>, where &#x2018;P<sub>i</sub>&#x2019; denotes the frequency of class noticed and &#x2018;c&#x2019; denotes the number of classes in the dataset.</p>
<p><bold>Bagging Tree Classifier:</bold> The Bagging Tree is an ensemble-based Bootstrap Aggregation algorithm that efficiently optimizes classifier robustness and accuracy, especially in high-dimensional data with missing values. It reduces inconsistency in machine learning models, making it useful for handling variations in datasets. An ensemble method of numerous models predicts the class with the highest probability of the chosen category. The formula to determine the class with the highest probability is given by <inline-formula id="ieqn-20"><mml:math id="mml-ieqn-20"><mml:mi>C</mml:mi><mml:mi>l</mml:mi><mml:mi>a</mml:mi><mml:mi>s</mml:mi><mml:mi>s</mml:mi><mml:mspace width="thinmathspace" /><mml:mi>w</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mspace width="thinmathspace" /><mml:mi>h</mml:mi><mml:mi>i</mml:mi><mml:mi>g</mml:mi><mml:mi>h</mml:mi><mml:mspace width="thinmathspace" /><mml:mi>P</mml:mi><mml:mi>r</mml:mi><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>a</mml:mi><mml:mi>b</mml:mi><mml:mi>i</mml:mi><mml:mi>l</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>y</mml:mi><mml:mspace width="thinmathspace" /><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mi>h</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mi>m</mml:mi><mml:mi>o</mml:mi><mml:mi>d</mml:mi><mml:mi>e</mml:mi><mml:mo 
stretchy="false">(</mml:mo><mml:msub><mml:mi>N</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:msubsup><mml:mi>y</mml:mi><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:msub><mml:mi>N</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:msubsup><mml:mi>y</mml:mi><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mo>)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mo>.</mml:mo><mml:mo>,</mml:mo><mml:msub><mml:mi>N</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:msubsup><mml:mi>y</mml:mi><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msubsup><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula></p>
<p><bold>Neural Network Classifier:</bold> Artificial Neural Networks (ANNs) mimic the behavior of the human brain and have been effectively employed for ultrasound image classification. ANNs consist of input, hidden, and output layers, with interconnected nodes applying thresholds and weights. They are particularly suited for classification tasks.</p>
</sec>
</sec>
</sec>
</sec>
<sec id="s4">
<label>4</label>
<title>Proposed Methodology</title>
<p>Early diagnosis remains the cornerstone of saving lives from cancer and thus has motivated researchers to explore the potential of computer-aided diagnosis (CAD) for cancer detection and classification tasks. Our proposed image processing model aims to improve the classification and segmentation accuracy of breast ultrasound images. The model is capable of categorizing the ultrasound images into normal, affected benign, or malignant. The image processing model is developed in steps by comparing the efficiencies of the most popular algorithms used in each image processing stage. A block diagram representation of the techniques and algorithms studied in each stage of the hybrid model development is illustrated in <xref ref-type="fig" rid="fig-5">Fig. 5</xref>. A hybrid model is developed comparing the experimental results obtained from each stage of image processing.</p>
<fig id="fig-5">
<label>Figure 5</label>
<caption>
<title>Different techniques and algorithms studied in the development of proposed hybrid model system</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-5.tif"/>
</fig>
<sec id="s4_1">
<label>4.1</label>
<title>Hybrid Model Architecture&#x2014;Flowchart and Its Algorithm</title>
<p>The highlighting feature of the proposed hybrid model is the collective use of best techniques in every stage of US image processing. The image processing pipeline starts with an efficient pre-processing stage powered by tailored eSRAD filtering that uses anisotropic diffusion (AD) filtering which is efficient towards reducing speckle noise in US images. A major limitation with AD filtering is its inability to preserve edges, which is addressed in eSRAD filtering by fixing the diffusion threshold based on the instantaneous variation coefficient (ICOV). Edge preserved, speckle reduced US images are segmented by using U-NET DNN. The U-NET architecture introduced in the model uses a 5-stage encoding powered by 2D Convolutional layers, ReLU layer, Max-pooling layer ending up in a bottleneck layer which is followed by 5-stage decoding carried out by transposed convolution layer, 2D convolution layer and ReLU layer. A fully connected layer and softmax layer follows the decoding stage and feeds the pixel classification layer which could effectively discriminate the background and the foreground pixels. Segmentation by U-NET is chosen, as it is experimentally proven effective compared to the conventional active contour (Snake) model. US image pre-processing and segmentation being done by tailored diffusion filtering and U-NET DNN, respectively, the classification stage of the hybrid model is chosen by conducting twelve different experiments to select the best techniques for feature extraction, selection and classification tasks. Based on experimental results, Genetic algorithm that uses classification accuracy based fitness function is used to select the most appropriate texture features extracted by GLCM. Comparing the classification accuracies, Bagging tree classifier is used in the proposed hybrid model for effective classification of lesions. Flowchart of the proposed model is given in <xref ref-type="fig" rid="fig-6">Fig. 
6</xref> whereas its algorithm is detailed in Algorithm 3.</p>
<fig id="fig-6">
<label>Figure 6</label>
<caption>
<title>Flowchart depicting the hybrid model algorithm</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-6.tif"/>
</fig>
<p>Image classification represents the final stage in image analysis. In our CAD system, ultrasound images are used and lesions are classified into normal, benign, or malignant. The Bagging Tree is an ensemble-based Bootstrap Aggregation algorithm that efficiently optimizes classifier robustness and accuracy, especially in high-dimensional data with missing values. Missing data values characterised by incomplete information about the patient or the anomaly being studied is a common issue with healthcare data analysis problems. Such a situation results from (i) information lost with time, (ii) missed data entry, (iii) incomplete data provided by patient or (iv) unskilled data annotation. Missing values result in degrading the prediction accuracy of an AI model. And thus developing AI model with robustness against missing values are gaining importance nowadays. Tree-based approaches and penalised regression approaches were found effective in handling high-dimensional data with missing values. One such approach is the Bagging tree classifier.</p>
<fig id="fig-13">
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-13.tif"/>
</fig>
</sec>
</sec>
<sec id="s5">
<label>5</label>
<title>Experimentation, Results and Analysis</title>
<sec id="s5_1">
<label>5.1</label>
<title>Experimental Setup</title>
<p>The experiment was conducted on a workstation with an Intel(R) Core(TM) i5 Processor, 8 GB RAM, Intel Iris Xe Graphics Card, and 512 GB SSD. The proposed novel hybrid model for the effective classification after segmentation of lesions from the BUSI database suggests the use of eSRAD filtering in the pre-processing stage, U-NET-based Segmentation methodology followed by feature extraction and feature selection by using GLCM and Genetic Algorithm and classification by bagging tree algorithm. The effectiveness of the above hybrid model is proven compared to the most competitive algorithm in each step of image processing. The experiment is carried out using MATLAB software and the BUSI online dataset.</p>
</sec>
<sec id="s5_2">
<label>5.2</label>
<title>Performance Metrics</title>
<p>The efficiency of our proposed model evaluated for various performance metrics is presented in this section. Image samples were obtained from an online open-source database. We quantitatively assessed the algorithm&#x2019;s performance using metrics such as sensitivity, specificity, and accuracy. Among them, the Bagging Tree classifier demonstrated the highest performance. Error rates are measured from the false predictions, namely false positive and false negative values, whereas the accuracy of a prediction is measured from the true positive and true negative results as compared to total predictions, respectively. True Positive (TP) results when the predicted output as well as the actual value are both true, and True Negative (TN) results when both the predicted as well as the actual value are false. Similarly, False positive (FP) represents a condition when the predicted output is positive while the actual output is negative, and false negative (FN) results when the prediction is negative when the actual value is positive. The performance parameters used for evaluation are defined in <xref ref-type="table" rid="table-4">Table 4</xref> below.</p>
<table-wrap id="table-4">
<label>Table 4</label>
<caption>
<title>Performance metrics considered for evaluating the model</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Performance parameter</th>
<th>Definition and formula</th>
</tr>
</thead>
<tbody>
<tr>
<td><italic>Accuracy (ACC)</italic></td>
<td>The amount of correctly predicted outputs among the total outputs.</td>
</tr>
<tr>
<td/>
<td><inline-formula id="ieqn-22"><mml:math id="mml-ieqn-22"><mml:mrow><mml:mtext>Accuracy</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>T</mml:mi><mml:mi>N</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>T</mml:mi><mml:mi>N</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>N</mml:mi></mml:mrow></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td><italic>Sensitivity (SEN)</italic></td>
<td>The true positive rate, indicating the number of true positives among the predicted cases as compared to the actual positive cases.</td>
</tr>
<tr>
<td/>
<td><inline-formula id="ieqn-23"><mml:math id="mml-ieqn-23"><mml:mrow><mml:mtext>Sensitivity</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>N</mml:mi></mml:mrow></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td><italic>Specificity (SPEC) or Recall</italic></td>
<td>The true negative rate, representing the number of true negatives among the predicted cases as compared to the actual negative cases.</td>
</tr>
<tr>
<td/>
<td><inline-formula id="ieqn-24"><mml:math id="mml-ieqn-24"><mml:mrow><mml:mtext>Recall or Specificity</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>N</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>N</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi></mml:mrow></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td><italic>Precision</italic></td>
<td>Precision is the ratio of true positive to the predicted number of positives. It is calculated based on the positive expected outcomes.</td>
</tr>
<tr>
<td/>
<td><inline-formula id="ieqn-25"><mml:math id="mml-ieqn-25"><mml:mrow><mml:mtext>Precision</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi></mml:mrow></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td><italic>F-measure</italic></td>
<td>F-measure (harmonic mean of precision and recall), also called F1 score.</td>
</tr>
<tr>
<td/>
<td><inline-formula id="ieqn-26"><mml:math id="mml-ieqn-26"><mml:mrow><mml:mtext>F</mml:mtext></mml:mrow><mml:mn>1</mml:mn><mml:mrow><mml:mtext>&#xA0;measure</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mn>2</mml:mn><mml:mo>&#x2217;</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:mi>i</mml:mi><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>o</mml:mi><mml:mi>n</mml:mi><mml:mo>&#x2217;</mml:mo><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>l</mml:mi></mml:mrow><mml:mrow><mml:mi>p</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:mi>i</mml:mi><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>o</mml:mi><mml:mi>n</mml:mi><mml:mo>+</mml:mo><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>l</mml:mi></mml:mrow></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td><italic>Jaccard index</italic></td>
<td>Jaccard index can quantify the similarities between the predicted output and the original input data. Jaccard&#x2019;s Index measures the degree of overlap between bounding boxes or masks.</td>
</tr>
<tr>
<td/>
<td><inline-formula id="ieqn-27"><mml:math id="mml-ieqn-27"><mml:mrow><mml:mtext>Jaccard index</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>N</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi></mml:mrow></mml:mfrac></mml:mstyle><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi>A</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>a</mml:mi><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:mi>o</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:mi>O</mml:mi><mml:mi>v</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>l</mml:mi><mml:mi>a</mml:mi><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>A</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>a</mml:mi><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:mi>o</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:mi>U</mml:mi><mml:mi>n</mml:mi><mml:mi>i</mml:mi><mml:mi>o</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td><italic>Dice coefficient</italic></td>
<td>Dice Coefficient quantifies the similarity between two masks.</td>
</tr>
<tr>
<td/>
<td><inline-formula id="ieqn-28"><mml:math id="mml-ieqn-28"><mml:mrow><mml:mtext>Dice Coefficient</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mn>2</mml:mn><mml:mi>T</mml:mi><mml:mi>P</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>N</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi></mml:mrow></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td><italic>Matthews correlation coefficient (MCC)</italic></td>
<td>MCC is used in machine learning as a measure of the quality of binary (two-class) classifications.</td>
</tr>
<tr>
<td/>
<td><inline-formula id="ieqn-29"><mml:math id="mml-ieqn-29"><mml:mrow><mml:mtext>MCC</mml:mtext></mml:mrow><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>&#x2217;</mml:mo><mml:mi>T</mml:mi><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi><mml:mo>&#x2217;</mml:mo><mml:mi>F</mml:mi><mml:mi>N</mml:mi></mml:mrow><mml:msqrt><mml:mrow><mml:mo>(</mml:mo><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>N</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>T</mml:mi><mml:mi>N</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>P</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>T</mml:mi><mml:mi>N</mml:mi><mml:mo>+</mml:mo><mml:mi>F</mml:mi><mml:mi>N</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:msqrt></mml:mfrac></mml:mstyle></mml:math></inline-formula></td>
</tr>
<tr>
<td><italic>AUC under ROC</italic></td>
<td>Receiver Operating Characteristics (ROC) is a graphical performance measure which represents the performance of the classifier over a range of true positive rate vs. false positive rate. AUC specifies the area under the ROC curve and this measure varies between 0 and 1. A classifier with AUC near 1 is an indication of a good classification accuracy.</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s5_3">
<label>5.3</label>
<title>Results and Comparative Analysis</title>
<sec id="s5_3_1">
<label>5.3.1</label>
<title>Results Cum Comparison of Active Contour and U-NET-Based Segmentation</title>
<p>The training images were sourced from the augmented image datastore to train the network effectively by providing a slightly different dataset for each epoch of training. The dataset is divided into testing and training sets in the ratio 70:30. In addition, the name-value pair of (ColorPreProcessing, gray2rgb) is used to obtain uniform-sized images that contain the same number of channels as the input of the U-NET layer. The images are then normalized and fed to the neural network.</p>
<p>The U-NET neural network used in the segmentation step is 5 levels deep with each encoding stage consisting of 2 layers of 2D CNN, one ReLU, and one Max-pooling layer. The first stage of encoding has 32 neurons and, in each stage, the number of neurons doubles whereas the images are down-sampled. This network at the bottleneck has 512 neurons and the expanding stage has layers in which the image is up-sampled and the number of neurons reduced by half in each stage. Finally, a fully connected layer and softmax layer send the output to the pixel classification layer. The dice function is used to find the similarity coefficient of the segmentation result from the ground truth.</p>
<p>The Active Contours segmentation without edges [<xref ref-type="bibr" rid="ref-44">44</xref>] is used for comparison of segmentation performance. Such an active contour method is chosen as they are recommended for images whose foregrounds and backgrounds are statistically different and homogeneous similar to our source images. The active contour-based segmentation function implemented is run for a larger number of iterations to get a good segmentation response. Samples corresponding to 500 and 200 iterations are presented as examples. The performance parameters say Accuracy, Sensitivity, F-measure, Precision, MCC, Dice, Jaccard, and Specificity of segmentation are calculated using the function &#x201C;EvaluateImageSegmentationScores&#x201D; for the two different segmentation steps and it is found that U-NET outperformed the traditional active contour-based segmentation. <xref ref-type="fig" rid="fig-7">Fig. 7</xref> reveals the comparison of accuracy achieved in the case of using Active contour and U-NET algorithms for segmentation. Whereas the image outputs in <xref ref-type="fig" rid="fig-8">Figs. 8</xref> and <xref ref-type="fig" rid="fig-9">9</xref> show the input image and the output images corresponding to the Benign and Malignant classes after being segmented by the Active contour model and <xref ref-type="fig" rid="fig-10">Figs. 10a</xref> and <xref ref-type="fig" rid="fig-10">10b</xref> reveal the segmentation results from the UNET model. <xref ref-type="table" rid="table-5">Table 5</xref> presents the performance metrics of the segmentation step.</p>
<fig id="fig-7">
<label>Figure 7</label>
<caption>
<title>Graphical representation between U-NET and active contour</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-7.tif"/>
</fig><fig id="fig-8">
<label>Figure 8</label>
<caption>
<title>Active contour-based segmentation on Benign</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-8.tif"/>
</fig><fig id="fig-9">
<label>Figure 9</label>
<caption>
<title>Active contour-based segmentation on Malignant</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-9.tif"/>
</fig><fig id="fig-10">
<label>Figure 10</label>
<caption>
<title>(a) The segmented output of benign. (b) The segmented output of malignant</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-10.tif"/>
</fig><table-wrap id="table-5">
<label>Table 5</label>
<caption>
<title>Evaluation metrics depicting the performance of segmentation techniques</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th rowspan="2">Evaluation metrics (%)</th>
<th colspan="2" align="center">Image-1</th>
<th colspan="2" align="center">Image-2</th>
</tr>
<tr>
<th>Active contour model</th>
<th>U-NET model</th>
<th>Active contour model</th>
<th>U-NET model</th>
</tr>
</thead>
<tbody>
<tr>
<td>Accuracy</td>
<td>86</td>
<td>98</td>
<td>87</td>
<td>98.5</td>
</tr>
<tr>
<td>Sensitivity</td>
<td>75</td>
<td>98</td>
<td>81</td>
<td>97</td>
</tr>
<tr>
<td>Specificity</td>
<td>87</td>
<td>98</td>
<td>89</td>
<td>98</td>
</tr>
<tr>
<td>F-measure</td>
<td>87</td>
<td>99</td>
<td>86</td>
<td>98</td>
</tr>
<tr>
<td>Precision</td>
<td>86</td>
<td>91</td>
<td>84</td>
<td>95</td>
</tr>
<tr>
<td>MCC</td>
<td>79</td>
<td>94</td>
<td>86</td>
<td>94</td>
</tr>
<tr>
<td>DC</td>
<td>89</td>
<td>97</td>
<td>89</td>
<td>98</td>
</tr>
<tr>
<td>Jaccard index</td>
<td>89</td>
<td>98</td>
<td>91</td>
<td>97</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s5_3_2">
<label>5.3.2</label>
<title>Comparison of Classification Accuracy by Different Machine Learning Classifiers</title>
<p>A Multi-scale wavelet transform with a window size of 5, window spacing of 5, and a sampling frequency of 50 is used to extract the features of the image dataset, namely Energy, Variance, Standard Deviation, and Waveform Length. A Grey Level Co-occurrence matrix is created for the images as GLCM is a direct indication of the horizontal proximity of the pixels and thus could better reveal the correlation between the pixels in an image. As a classification of the lesions mainly depends on the texture of the image, the texture features of the image, namely contrast&#x2014;a measure of the variance, correlation&#x2014;probability of occurrence, energy&#x2014;the sum of the square of each element, homogeneity&#x2014;the closeness of each element, are extracted from the properties of the GLCM matrix.</p>
<p>Feature selection by Leave one-out cross-validation and genetic algorithm is done followed by a classification task to determine the best feature selection algorithm and classifier for the hybrid model. LOOCV feature selection uses the mean squared error values as the determining factor to select a feature and the different classifiers are run to determine the classification accuracy. Genetic algorithm is an evolutionary algorithm though initially developed to obtain optimized solutions, that are found effective in selecting the best features for classification tasks. The selection operator finds the best fitting features whereas the mutation and cross-over operators diversify the search space. The classification accuracy is considered as the fitness function to determine the features to be selected for classification. Classification of the images by three classifiers random forest, neural network, and bagging tree is carried out to determine the best classifier.</p>
<p>The performance of the classifier for different combinations of feature extraction, feature selection, and classification resulted in running 12 different combinations of experiments, and the corresponding performances are measured by parameters, namely Accuracy, sensitivity, specificity, and ROC. Accuracy, sensitivity, and specificity are measures based on confusion matrices whereas ROC is measured from the ROC curves. Receiver Operating Characteristics (ROC) is a graphical performance measure which represents the performance of the classifier over a range of true positive rate <italic>vs.</italic> false positive rate. A good classifier will have a ROC above 95% and the area under the ROC curve will be nearing 1, i.e., with an AUC value nearing 1. The ROC curves for the 12 different experiments are depicted in <xref ref-type="fig" rid="fig-11">Figs. 11a</xref> to <xref ref-type="fig" rid="fig-11">11l</xref>. The performance of the classification task is represented graphically in <xref ref-type="fig" rid="fig-12">Fig. 12</xref> and <xref ref-type="table" rid="table-6">Table 6</xref> provides the observed values of performance parameters for different combinations of feature extraction algorithms, feature selection algorithms, and classifiers. It highlights the varying performance across different scenarios, emphasizing the Bagging Tree classifier&#x2019;s superiority in several cases.</p>
<fig id="fig-11">
<label>Figure 11</label>
<caption>
<title>(a&#x2013;c) ROC curves obtained when wavelet transformed feature extraction and LOOCV-based feature selection are used for classification. (d&#x2013;f) ROC curves obtained when wavelet transformed feature extraction and Genetic Algorithm based feature selection is used (g&#x2013;i) ROC curves obtained when GLCM based feature extraction and LOOCV based feature selection (j&#x2013;l) ROC curves obtained when GLCM based feature extraction and Genetic Algorithm based feature selection</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-11a.tif"/>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-11b.tif"/>
</fig><fig id="fig-12">
<label>Figure 12</label>
<caption>
<title>Accuracy of the classifiers</title>
</caption>
<graphic mimetype="image" mime-subtype="tif" xlink:href="CMC_47961-fig-12.tif"/>
</fig><table-wrap id="table-6">
<label>Table 6</label>
<caption>
<title>Observed values of performance parameters on different classifiers</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Feature extraction</th>
<th>Feature selection</th>
<th>Classifier</th>
<th>Accuracy</th>
<th>Sensitivity</th>
<th>Specificity</th>
<th>AUC of ROC</th>
</tr>
</thead>
<tbody>
<tr>
<td rowspan="6">Wavelet</td>
<td rowspan="3">LOOCV</td>
<td>Random forest</td>
<td>88.4</td>
<td>94.18</td>
<td>91.27</td>
<td>0.95</td>
</tr>
<tr>
<td><bold>Bagging trees</bold></td>
<td><bold>98.81</bold></td>
<td><bold>94.10</bold></td>
<td><bold>96.12</bold></td>
<td><bold>0.99</bold></td>
</tr>
<tr>
<td>Neural network</td>
<td>85.4</td>
<td>83.59</td>
<td>85.16</td>
<td>0.97</td>
</tr>
<tr>
<td rowspan="3">Genetic algorithm</td>
<td>Random forest</td>
<td>90.5</td>
<td>93.28</td>
<td>88.90</td>
<td>0.97</td>
</tr>
<tr>
<td><bold>Bagging trees</bold></td>
<td><bold>99.3</bold></td>
<td><bold>98.98</bold></td>
<td><bold>97.10</bold></td>
<td><bold>0.98</bold></td>
</tr>
<tr>
<td>Neural network</td>
<td>71.4</td>
<td>84.31</td>
<td>87.90</td>
<td>0.96</td>
</tr>
<tr>
<td rowspan="6">GLCM</td>
<td rowspan="3">LOOCV</td>
<td>Random forest</td>
<td>72.2</td>
<td>66.11</td>
<td>88.71</td>
<td>0.84</td>
</tr>
<tr>
<td><bold>Bagging trees</bold></td>
<td><bold>99.7</bold></td>
<td><bold>99.00</bold></td>
<td><bold>99.00</bold></td>
<td><bold>0.98</bold></td>
</tr>
<tr>
<td>Neural network</td>
<td>71.6</td>
<td>69.29</td>
<td>84.02</td>
<td>0.84</td>
</tr>
<tr>
<td rowspan="3">Genetic algorithm</td>
<td>Random forest</td>
<td>84.5</td>
<td>81.49</td>
<td>94.86</td>
<td>0.94</td>
</tr>
<tr>
<td><bold>Bagging trees</bold></td>
<td><bold>99.9</bold></td>
<td><bold>99.50</bold></td>
<td><bold>99.50</bold></td>
<td><bold>0.97</bold></td>
</tr>
<tr>
<td>Neural network</td>
<td>95.9</td>
<td>95.29</td>
<td>88.50</td>
<td>0.95</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The comparison of various performance parameters from <xref ref-type="table" rid="table-6">Table 6</xref> reveals that the classification by the Bagging Tree algorithm consistently outperforms Random Forest and Neural Network when features were extracted and selected using GLCM and GA.</p>
</sec>
<sec id="s5_3_3">
<label>5.3.3</label>
<title>Comparison of the Proposed Model with Models Existing in the Literature</title>
<p>To evaluate the performance of our proposed method, we have conducted a comparative study with different research papers and existing techniques. Some of these papers employed similar techniques but with different algorithms [<xref ref-type="bibr" rid="ref-21">21</xref>,<xref ref-type="bibr" rid="ref-23">23</xref>,<xref ref-type="bibr" rid="ref-45">45</xref>,<xref ref-type="bibr" rid="ref-46">46</xref>], while others explored ensemble methods with distinct datasets [<xref ref-type="bibr" rid="ref-29">29</xref>] and [<xref ref-type="bibr" rid="ref-47">47</xref>]. <xref ref-type="table" rid="table-7">Table 7</xref>, provided below, showcases our proposed method&#x2019;s performance in comparison to other studies. The proposed method consistently outperforms existing work, achieving superior accuracy levels. This comparison highlights that the proposed approach, which combines U-NET, Genetic Algorithm, and Bagging Trees, achieved the highest accuracy of 99.9% which surpasses the performance of other techniques and underscores the effectiveness of our methodology.</p>
<table-wrap id="table-7">
<label>Table 7</label>
<caption>
<title>Comparative study with existing methods</title>
</caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th>Work</th>
<th>Method</th>
<th>Accuracy</th>
</tr>
</thead>
<tbody>
<tr>
<td>Proposed method</td>
<td>eSRAD, U-NET, Genetic algorithm, Bagging tree</td>
<td>99.9%</td>
</tr>
<tr>
<td>Chithrakkannan et al. [<xref ref-type="bibr" rid="ref-2">2</xref>]</td>
<td>GLCM</td>
<td>96%</td>
</tr>
<tr>
<td>Byra et al. [<xref ref-type="bibr" rid="ref-3">3</xref>]</td>
<td>Selective Kernel U-NET</td>
<td>97.9%</td>
</tr>
<tr>
<td>Tarighat [<xref ref-type="bibr" rid="ref-23">23</xref>]</td>
<td>U-NET</td>
<td>91%</td>
</tr>
<tr>
<td>Naveed [<xref ref-type="bibr" rid="ref-39">39</xref>]</td>
<td>Random forest</td>
<td>98%</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The proposed model could achieve the highest accuracy of 99.9% due to its inherent tailored image processing stages. The pre-processing and the subsequent image-processing stages are carefully curated after comparing the most effective algorithms in each stage. The key features that paved the way for achieving this high result include (i) eSRAD filtering: Ultrasound images are corrupted by speckle noise and hence effective speckle reduction is carried out by an enhanced Speckle Reducing Anisotropic Diffusion filter with diffusion coefficients tailored by an instantaneous coefficient of variance, ICOV and the output PSNR reveals the effectiveness of the eSRAD filtering (ii) 5-level deep U-NET based segmentation that could segment the lesions which are proven from the accuracy of the results achieved and (iii) ensemble-based bagging tree classifier that utilizes an ensemble of different classifiers to predict the classifier with the highest probable prediction accuracy. Thus, the trained hybrid model could make predictions with the highest accuracy.</p>
</sec>
</sec>
</sec>
<sec id="s6">
<label>6</label>
<title>Conclusion and Future Scope</title>
<p>The research heralds a ground-breaking methodology for breast ultrasound image classification, presenting innovative approaches and demonstrating remarkable achievements. The hybrid model, comprising Enhanced SRAD for speckle noise reduction, U-NET-based segmentation, Genetic Algorithm-driven feature selection, and classification employing Bagging Trees, stands as a testament to meticulous development through rigorous comparisons, with the overarching aim to enhance the accuracy and efficiency of ultrasound image classification. The culmination of these efforts resulted in an astounding 99.9% accuracy rate in classifying breast lesions into normal, malignant, or benign tissues. The significance of our research lies in its potential to revolutionize breast cancer diagnosis, promising increased accuracy and efficiency in clinical settings.</p>
<p>The main highlights of our work revolve around Enhanced SRAD&#x2019;s noise reduction capabilities, the precision of U-NET-based Segmentation, the efficacy of Genetic Algorithm-driven Feature Selection, and the robustness of Bagging Trees Classification. However, our study is not without limitations. Expanding the dataset to encompass a broader and more diverse range of ultrasound images presents a critical avenue for further validation of the robustness and generalizability of our methodology. A more extensive dataset can fortify the model&#x2019;s capabilities to adapt to varied scenarios and patient demographics, enhancing its real-world applicability and reliability. Moreover, while our methodology has showcased impressive accuracy, continual refinement and optimization remain imperative. Efforts to reduce computational overhead while maintaining or even enhancing accuracy will pave the way for more seamless integration into clinical practice.</p>
<p>In the near future, we plan to pursue collaborations with medical institutions for real-time validation and integration of the proposed methodology into clinical workflows that can propel its adoption and ensure its utility in aiding healthcare professionals. In addition, we plan to test and validate the proposed hybrid methodology on more advanced datasets.</p>
</sec>
</body>
<back><ack><p>The authors acknowledge and extend their appreciation to the Researchers Supporting Project Number (RSPD2024R996), King Saud University, Riyadh, Saudi Arabia for funding this study.</p>
</ack>
<sec><title>Funding Statement</title>
<p>This research was funded through Researchers Supporting Project Number (RSPD2024R996), King Saud University, Riyadh, Saudi Arabia.</p>
</sec>
<sec><title>Author Contributions</title>
<p>The authors confirm contribution to the paper as follows: study conception and design: S. Shobana; data collection: Judith Justin; analysis and interpretation of results: R. Vanithamani, Miguel Villag&#x00F3;mez Galindo, Mushtaq Ahmad Ansari, draft manuscript preparation and critical revision: K. Umapathi, Anand Nayyar, Hitesh Panchal. All authors reviewed the results and approved the final version of the manuscript.</p>
</sec>
<sec sec-type="data-availability"><title>Availability of Data and Materials</title>
<p>Data will be made available on request.</p>
</sec>
<sec sec-type="COI-statement"><title>Conflicts of Interest</title>
<p>The authors declare that they have no conflicts of interest to report regarding the present study.</p>
</sec>
<ref-list content-type="authoryear">
<title>References</title>
<ref id="ref-1"><label>[1]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Baek</surname></string-name>, <string-name><given-names>A. M.</given-names> <surname>O&#x2019;Connell</surname></string-name>, and <string-name><given-names>K. J.</given-names> <surname>Parker</surname></string-name></person-group>, &#x201C;<article-title>Improving breast cancer diagnosis by incorporating raw ultrasound parameters into machine learning</article-title>,&#x201D; <source>Mach. Learn.: Sci. Technol.</source>, vol. <volume>3</volume>, no. <issue>4</issue>, pp. <fpage>045013</fpage>, <year>Dec. 2022</year>. doi: <pub-id pub-id-type="doi">10.1088/2632-2153/ac9bcc</pub-id>; <pub-id pub-id-type="pmid">36698865</pub-id></mixed-citation></ref>
<ref id="ref-2"><label>[2]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>R.</given-names> <surname>Chithrakkannan</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Kavitha</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Mangayarkarasi</surname></string-name>, and <string-name><given-names>R.</given-names> <surname>Karthikeyan</surname></string-name></person-group>, &#x201C;<article-title>Breast cancer detection using machine learning</article-title>,&#x201D; <source>Int. J. Innov. Technol. Expl. Eng. (IJITEE)</source>, vol. <volume>8</volume>, no. <issue>11</issue>, pp. <fpage>3123</fpage>&#x2013;<lpage>3126</lpage>, <year>Sept. 2019</year>. doi: <pub-id pub-id-type="doi">10.35940/ijitee.K2498.0981119</pub-id>.</mixed-citation></ref>
<ref id="ref-3"><label>[3]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Byra</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>Breast mass segmentation in ultrasound with selective kernel U-Net convolutional neural network</article-title>,&#x201D; <source>Biomed. Signal Process. Control</source>, vol. <volume>61</volume>, pp. <fpage>102027</fpage>, <year>Jun. 2020</year>. doi: <pub-id pub-id-type="doi">10.1016/j.bspc.2020.102027</pub-id>; <pub-id pub-id-type="pmid">34703489</pub-id></mixed-citation></ref>
<ref id="ref-4"><label>[4]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Amiri</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Brooks</surname></string-name>, <string-name><given-names>B.</given-names> <surname>Behboodi</surname></string-name>, and <string-name><given-names>H.</given-names> <surname>Rivaz</surname></string-name></person-group>, &#x201C;<article-title>Two-stage ultrasound image segmentation using U-Net and test time augmentation</article-title>,&#x201D; <source>Int. J. Comput. Assist. Radiol. Surg.</source>, vol. <volume>15</volume>, no. <issue>6</issue>, pp. <fpage>981</fpage>&#x2013;<lpage>988</lpage>, <year>Apr. 2020</year>. doi: <pub-id pub-id-type="doi">10.1007/s11548-020-02158-3</pub-id>; <pub-id pub-id-type="pmid">32350786</pub-id></mixed-citation></ref>
<ref id="ref-5"><label>[5]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>X.</given-names> <surname>Zou</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>Improved breast ultrasound tumor classification using dual-input CNN with GAP-guided attention loss</article-title>,&#x201D; <source>Math. Biosci. Eng.</source>, vol. <volume>20</volume>, no. <issue>8</issue>, pp. <fpage>15244</fpage>&#x2013;<lpage>15264</lpage>, <year>Jul. 2023</year>. doi: <pub-id pub-id-type="doi">10.3934/mbe.2023682</pub-id>; <pub-id pub-id-type="pmid">37679179</pub-id></mixed-citation></ref>
<ref id="ref-6"><label>[6]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Chaudhury</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Sau</surname></string-name></person-group>, &#x201C;<article-title>Classification of breast masses using ultrasound images by approaching GAN, transfer learning and deep learning techniques</article-title>,&#x201D; <source>J. Artif. Intell. Technol.</source>, vol. <volume>3</volume>, pp. <fpage>142</fpage>&#x2013;<lpage>153</lpage>, <year>May 2023</year>. doi: <pub-id pub-id-type="doi">10.37965/jait.2023.0175</pub-id>.</mixed-citation></ref>
<ref id="ref-7"><label>[7]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Ragab</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Albukhari</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Alyami</surname></string-name>, and <string-name><given-names>R.</given-names> <surname>Mansour</surname></string-name></person-group>, &#x201C;<article-title>Ensemble deep-learning-enabled clinical decision support system for breast cancer diagnosis and classification on ultrasound images</article-title>,&#x201D; <source>Biol.</source>, vol. <volume>11</volume>, no. <issue>3</issue>, pp. <fpage>439</fpage>, <year>Mar. 2022</year>. doi: <pub-id pub-id-type="doi">10.3390/biology11030439</pub-id>; <pub-id pub-id-type="pmid">35336813</pub-id></mixed-citation></ref>
<ref id="ref-8"><label>[8]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Nazir</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>A novel CNN-inception-V4-based hybrid approach for classification of breast cancer in mammogram images</article-title>,&#x201D; <source>Wirel. Commun. Mobile Comput.</source>, vol. <volume>2022</volume>, pp. <fpage>1</fpage>&#x2013;<lpage>10</lpage>, <year>Jul. 2022</year>. doi: <pub-id pub-id-type="doi">10.1155/2022/5089078</pub-id>.</mixed-citation></ref>
<ref id="ref-9"><label>[9]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>P.</given-names> <surname>Labcharoenwongs</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Vonganansup</surname></string-name>, <string-name><given-names>O.</given-names> <surname>Chunhapran</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Noolek</surname></string-name>, and <string-name><given-names>T.</given-names> <surname>Yampaka</surname></string-name></person-group>, &#x201C;<article-title>An automatic breast tumor detection and classification including automatic tumor volume estimation using deep learning technique</article-title>,&#x201D; <source>Asian Pac. J. Cancer Prev.</source>, vol. <volume>24</volume>, no. <issue>3</issue>, pp. <fpage>1081</fpage>&#x2013;<lpage>1088</lpage>, <year>Mar. 2023</year>. doi: <pub-id pub-id-type="doi">10.31557/APJCP.2023.24.3.1081</pub-id>; <pub-id pub-id-type="pmid">36974564</pub-id></mixed-citation></ref>
<ref id="ref-10"><label>[10]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Hassanien</surname></string-name>, <string-name><given-names>V.</given-names> <surname>Singh</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Puig</surname></string-name>, and <string-name><given-names>M.</given-names> <surname>Abdel-Nasser</surname></string-name></person-group>, &#x201C;<article-title>Predicting breast tumor malignancy using deep ConvNeXt radiomics and quality-based score pooling in ultrasound sequences</article-title>,&#x201D; <source>Diagnostics</source>, vol. <volume>12</volume>, no. <issue>5</issue>, pp. <fpage>1053</fpage>, <year>2022</year>. doi: <pub-id pub-id-type="doi">10.3390/diagnostics12051053</pub-id>; <pub-id pub-id-type="pmid">35626208</pub-id></mixed-citation></ref>
<ref id="ref-11"><label>[11]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Kuo</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Chou</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Su</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Huang</surname></string-name>, and <string-name><given-names>C.</given-names> <surname>Chen</surname></string-name></person-group>, &#x201C;<article-title>Breast tumor classification using short&#x2014;ResNet with pixel-based tumor probability map in ultrasound images</article-title>,&#x201D; <source>Ultrasonic Imaging</source>, vol. <volume>45</volume>, no. <issue>2</issue>, pp. <fpage>74</fpage>&#x2013;<lpage>84</lpage>, <year>Mar. 2023</year>. doi: <pub-id pub-id-type="doi">10.1177/01617346231162906</pub-id>; <pub-id pub-id-type="pmid">36951105</pub-id></mixed-citation></ref>
<ref id="ref-12"><label>[12]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>G.</given-names> <surname>Sangha</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>Assessment of photoacoustic tomography contrast for breast tissue imaging using 3D correlative virtual histology</article-title>,&#x201D; <source>Sci. Rep.</source>, vol. <volume>12</volume>, no. <issue>1</issue>, pp. <fpage>438</fpage>, <year>Feb. 2022</year>. doi: <pub-id pub-id-type="doi">10.1038/s41598-022-06501-3</pub-id>; <pub-id pub-id-type="pmid">35169198</pub-id></mixed-citation></ref>
<ref id="ref-13"><label>[13]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>F.</given-names> <surname>Zerrad</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>Microwave imaging approach for breast cancer detection using a tapered slot antenna loaded with parasitic components</article-title>,&#x201D; <source>Mater.</source>, vol. <volume>16</volume>, no. <issue>4</issue>, pp. <fpage>1496</fpage>, <year>Feb. 2023</year>. doi: <pub-id pub-id-type="doi">10.3390/ma16041496</pub-id>; <pub-id pub-id-type="pmid">36837126</pub-id></mixed-citation></ref>
<ref id="ref-14"><label>[14]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Anupama</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Amutha</surname></string-name>, and <string-name><given-names>R.</given-names> <surname>Ramesh</surname></string-name></person-group>, &#x201C;<article-title>An efficient preprocessing technique for multimodality breast cancer images</article-title>,&#x201D; <source>Int. J. Online Biomed. Eng. (IIOE)</source>, vol. <volume>19</volume>, no. <issue>8</issue>, pp. <fpage>88</fpage>&#x2013;<lpage>96</lpage>, <year>Jun. 2023</year>. doi: <pub-id pub-id-type="doi">10.3991/ijoe.v19i08.40043</pub-id>.</mixed-citation></ref>
<ref id="ref-15"><label>[15]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Elbaz</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Elwahsh</surname></string-name>, and <string-name><given-names>I.</given-names> <surname>El-Henawy</surname></string-name></person-group>, &#x201C;<article-title>Proposed framework for detection of breast tumors</article-title>,&#x201D; <source>Comput. Mater. Continua</source>, vol. <volume>74</volume>, no. <issue>2</issue>, pp. <fpage>2927</fpage>&#x2013;<lpage>2944</lpage>, <year>Oct. 2023</year>. doi: <pub-id pub-id-type="doi">10.32604/cmc.2023.033111</pub-id>.</mixed-citation></ref>
<ref id="ref-16"><label>[16]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. E.</given-names> <surname>Ilesanmi</surname></string-name>, <string-name><given-names>O. P.</given-names> <surname>Idowu</surname></string-name>, <string-name><given-names>U.</given-names> <surname>Chaumrattanakul</surname></string-name>, and <string-name><given-names>S. S.</given-names> <surname>Makhanov</surname></string-name></person-group>, &#x201C;<article-title>Multiscale hybrid algorithm for pre-processing of ultrasound images</article-title>,&#x201D; <source>Biomed. Signal. Process. Control</source>, vol. <volume>66</volume>, no. <issue>3</issue>, pp. <fpage>102396</fpage>, <year>Jan. 2021</year>. doi: <pub-id pub-id-type="doi">10.1016/j.bspc.2020.102396</pub-id>.</mixed-citation></ref>
<ref id="ref-17"><label>[17]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>H.</given-names> <surname>Choi</surname></string-name> and <string-name><given-names>J.</given-names> <surname>Jeong</surname></string-name></person-group>, &#x201C;<article-title>Despeckling algorithm for removing speckle noise from ultrasound images</article-title>,&#x201D; <source>Symmetry</source>, vol. <volume>12</volume>, no. <issue>6</issue>, pp. <fpage>938</fpage>, <year>Jun. 2020</year>. doi: <pub-id pub-id-type="doi">10.3390/sym12060938</pub-id>.</mixed-citation></ref>
<ref id="ref-18"><label>[18]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>X.</given-names> <surname>Li</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Zhao</surname></string-name>, and <string-name><given-names>Y.</given-names> <surname>Wei</surname></string-name></person-group>, &#x201C;<article-title>Fast speckle noise suppression algorithm in breast ultrasound image using three-dimensional deep learning</article-title>,&#x201D; <source>Front. Physiol.</source>, vol. <volume>13</volume>, pp. <fpage>zsaa112</fpage>, <year>Apr. 2022</year>. doi: <pub-id pub-id-type="doi">10.3389/fphys.2022.880966</pub-id>; <pub-id pub-id-type="pmid">35492597</pub-id></mixed-citation></ref>
<ref id="ref-19"><label>[19]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>D.</given-names> <surname>Wulff</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Mehdi</surname></string-name>, <string-name><given-names>F.</given-names> <surname>Ernst</surname></string-name>, and <string-name><given-names>J.</given-names> <surname>Hagenah</surname></string-name></person-group>, &#x201C;<article-title>Cross data set generalization of ultrasound image augmentation using representation learning: A case study</article-title>,&#x201D; <source>Curr. Dir. Biomed. Eng.</source>, vol. <volume>7</volume>, no. <issue>2</issue>, pp. <fpage>755</fpage>&#x2013;<lpage>758</lpage>, <year>Oct. 2021</year>. doi: <pub-id pub-id-type="doi">10.1515/cdbme-2021-2193</pub-id>.</mixed-citation></ref>
<ref id="ref-20"><label>[20]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>C.</given-names> <surname>Keatmanee</surname></string-name>, <string-name><given-names>U.</given-names> <surname>Chaumrattanakul</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Kotani</surname></string-name>, and <string-name><given-names>S. S.</given-names> <surname>Makhanov</surname></string-name></person-group>, &#x201C;<article-title>Initialization of active contours for segmentation of breast cancer via fusion of Ultrasound, Doppler, and elasticity images</article-title>,&#x201D; <source>Ultrasonics</source>, vol. <volume>94</volume>, no. <issue>6</issue>, pp. <fpage>438</fpage>&#x2013;<lpage>453</lpage>, <year>2019</year>. doi: <pub-id pub-id-type="doi">10.1016/j.ultras.2017.12.008</pub-id>; <pub-id pub-id-type="pmid">29477236</pub-id></mixed-citation></ref>
<ref id="ref-21"><label>[21]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Rodtook</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Kirimasthong</surname></string-name>, <string-name><given-names>W.</given-names> <surname>Lohitvisate</surname></string-name>, and <string-name><given-names>S. S.</given-names> <surname>Makhanov</surname></string-name></person-group>, &#x201C;<article-title>Automatic initialization of active contours and level set method in ultrasound images of breast abnormalities</article-title>,&#x201D; <source>Pattern Recognit.</source>, vol. <volume>79</volume>, no. <issue>4</issue>, pp. <fpage>172</fpage>&#x2013;<lpage>182</lpage>, <year>Jul. 2018</year>. doi: <pub-id pub-id-type="doi">10.1016/j.patcog.2018.01.032</pub-id>.</mixed-citation></ref>
<ref id="ref-22"><label>[22]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Iqbal</surname></string-name> and <string-name><given-names>M.</given-names> <surname>Sharif</surname></string-name></person-group>, &#x201C;<article-title>MDA-Net: Multiscale dual attention-based network for breast lesion segmentation using ultrasound images</article-title>,&#x201D; <source>Comput. Inf. Sci.</source>, vol. <volume>34</volume>, no. <issue>9</issue>, pp. <fpage>7283</fpage>&#x2013;<lpage>7299</lpage>, <year>Oct. 2022</year>. doi: <pub-id pub-id-type="doi">10.1016/j.jksuci.2021.10.002</pub-id>.</mixed-citation></ref>
<ref id="ref-23"><label>[23]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. P.</given-names> <surname>Tarighat</surname></string-name></person-group>, &#x201C;<article-title>Breast tumor segmentation using deep learning by U-Net network</article-title>,&#x201D; <source>J. Telecommun.&#x2014;Electron. Comput. Eng. (JTEC)</source>, vol. <volume>13</volume>, no. <issue>2</issue>, pp. <fpage>49</fpage>&#x2013;<lpage>54</lpage>, <year>Jun. 2021</year>.</mixed-citation></ref>
<ref id="ref-24"><label>[24]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>T.</given-names> <surname>Zhao</surname></string-name> and <string-name><given-names>H.</given-names> <surname>Dai</surname></string-name></person-group>, &#x201C;<article-title>Breast tumor ultrasound image segmentation method based on improved residual U-Net network</article-title>,&#x201D; <source>Comput. Intell. Neurosci.</source>, vol. <volume>2022</volume>, pp. <fpage>9</fpage>, <year>Jun. 2022</year>. doi: <pub-id pub-id-type="doi">10.1155/2022/3905998</pub-id>; <pub-id pub-id-type="pmid">35795762</pub-id></mixed-citation></ref>
<ref id="ref-25"><label>[25]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Z.</given-names> <surname>Zhou</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Siddiquee</surname></string-name>, <string-name><given-names>N.</given-names> <surname>Tajbakhsh</surname></string-name>, and <string-name><given-names>J.</given-names> <surname>Liang</surname></string-name></person-group>, &#x201C;<article-title>UNet&#x002B;&#x002B;: Redesigning skip connections to exploit multiscale features in image segmentation</article-title>,&#x201D; <source>IEEE Trans. Med. Imaging</source>, vol. <volume>39</volume>, no. <issue>6</issue>, pp. <fpage>1856</fpage>&#x2013;<lpage>1867</lpage>, <year>Jun. 2020</year>. doi: <pub-id pub-id-type="doi">10.1109/TMI.2019.2959609</pub-id>; <pub-id pub-id-type="pmid">31841402</pub-id></mixed-citation></ref>
<ref id="ref-26"><label>[26]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N.</given-names> <surname>Siddique</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Sidike</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Elkin</surname></string-name>, and <string-name><given-names>V.</given-names> <surname>Devabhaktuni</surname></string-name></person-group>, &#x201C;<article-title>U-net and its variants for medical image segmentation: A review of theory and applications</article-title>,&#x201D; <source>IEEE Access</source>, vol. <volume>9</volume>, pp. <fpage>82031</fpage>&#x2013;<lpage>82057</lpage>, <year>Jun. 2021</year>. doi: <pub-id pub-id-type="doi">10.1109/access.2021.3086020</pub-id>.</mixed-citation></ref>
<ref id="ref-27"><label>[27]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>T.</given-names> <surname>Liu</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Christian</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Chang</surname></string-name>, <string-name><given-names>F.</given-names> <surname>Lai</surname></string-name> and <string-name><given-names>H.</given-names> <surname>Tai</surname></string-name></person-group>, &#x201C;<article-title>Automatic segmentation and measurement of pressure injuries using deep learning models and a lidar camera</article-title>,&#x201D; <source>Sci. Rep.</source>, vol. <volume>13</volume>, no. <issue>680</issue>, pp. <fpage>39</fpage>, <year>Jan. 2023</year>. doi: <pub-id pub-id-type="doi">10.1038/s41598-022-26812-9</pub-id>; <pub-id pub-id-type="pmid">36639395</pub-id></mixed-citation></ref>
<ref id="ref-28"><label>[28]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N.</given-names> <surname>Ibtehaz</surname></string-name> and <string-name><given-names>M.</given-names> <surname>Rahman</surname></string-name></person-group>, &#x201C;<article-title>MultiResUNet: Rethinking the u-net architecture for multimodal biomedical image segmentation</article-title>,&#x201D; <source>Neural Netw.</source>, vol. <volume>121</volume>, pp. <fpage>74</fpage>&#x2013;<lpage>87</lpage>, <year>Jan. 2020</year>. doi: <pub-id pub-id-type="doi">10.1016/j.neunet.2019.08.025</pub-id>; <pub-id pub-id-type="pmid">31536901</pub-id></mixed-citation></ref>
<ref id="ref-29"><label>[29]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Hu</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>Automatic tumor segmentation in breast ultrasound images using a dilated fully convolutional network combined with an active contour model</article-title>,&#x201D; <source>Med. Phys.</source>, vol. <volume>46</volume>, no. <issue>1</issue>, pp. <fpage>215</fpage>&#x2013;<lpage>228</lpage>, <year>Oct. 2018</year>. doi: <pub-id pub-id-type="doi">10.1002/mp.13268</pub-id>; <pub-id pub-id-type="pmid">30374980</pub-id></mixed-citation></ref>
<ref id="ref-30"><label>[30]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>F.</given-names> <surname>Torres</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>Lesion detection in breast ultrasound images using a machine learning approach and genetic optimization</article-title>,&#x201D; in <source>Pattern Recognition and Image Analysis</source>, pp. <fpage>289</fpage>&#x2013;<lpage>301</lpage>, <year>2019</year>. doi: <pub-id pub-id-type="doi">10.1007/978-3-030-31332-6_26</pub-id>.</mixed-citation></ref>
<ref id="ref-31"><label>[31]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>V.</given-names> <surname>Jackins</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Vimal</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Kaliappan</surname></string-name>, and <string-name><given-names>M. Y.</given-names> <surname>Lee</surname></string-name></person-group>, &#x201C;<article-title>AI-based smart prediction of clinical disease using random forest classifier and Naive Bayes</article-title>,&#x201D; <source>J. Supercomput.</source>, vol. <volume>77</volume>, no. <issue>5</issue>, pp. <fpage>5198</fpage>&#x2013;<lpage>5219</lpage>, <year>Nov. 2020</year>. doi: <pub-id pub-id-type="doi">10.1007/s11227-020-03481-x</pub-id>.</mixed-citation></ref>
<ref id="ref-32"><label>[32]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Kumari</surname></string-name> and <string-name><given-names>V.</given-names> <surname>Singh</surname></string-name></person-group>, &#x201C;<article-title>Breast cancer prediction system</article-title>,&#x201D; <source>Procedia Comput. Sci.</source>, vol. <volume>132</volume>, no. <issue>1</issue>, pp. <fpage>371</fpage>&#x2013;<lpage>376</lpage>, <year>Jun. 2018</year>. doi: <pub-id pub-id-type="doi">10.1016/j.procs.2018.05.197</pub-id>.</mixed-citation></ref>
<ref id="ref-33"><label>[33]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Chaudhary</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Arora</surname></string-name>, and <string-name><given-names>N.</given-names> <surname>Yadav</surname></string-name></person-group>, &#x201C;<article-title>Optimization of random forest algorithm for breast cancer detection</article-title>,&#x201D; <source>Int. J. Innov. Res. Comput. Sci. Technol. (IJIRCST)</source>, vol. <volume>8</volume>, no. <issue>3</issue>, pp. <fpage>63</fpage>&#x2013;<lpage>66</lpage>, <year>May 2020</year>. doi: <pub-id pub-id-type="doi">10.21276/ijircst.2020.8.3.4</pub-id>.</mixed-citation></ref>
<ref id="ref-34"><label>[34]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Z.</given-names> <surname>Cao</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Duan</surname></string-name>, <string-name><given-names>G.</given-names> <surname>Yang</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Yue</surname></string-name>, and <string-name><given-names>Q.</given-names> <surname>Chen</surname></string-name></person-group>, &#x201C;<article-title>An experimental study on breast lesion detection and classification from ultrasound images using deep learning architectures</article-title>,&#x201D; <source>BMC Med. Imaging.</source>, vol. <volume>19</volume>, no. <issue>51</issue>, pp. <fpage>299</fpage>, <year>Jul. 2019</year>. doi: <pub-id pub-id-type="doi">10.1186/s12880-019-0349-x</pub-id>; <pub-id pub-id-type="pmid">31262255</pub-id></mixed-citation></ref>
<ref id="ref-35"><label>[35]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Gu</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>Deep learning based on ultrasound images assists breast lesion diagnosis in China: A multicenter diagnostic study</article-title>,&#x201D; <source>Insights Imaging</source>, vol. <volume>13</volume>, no. <issue>1</issue>, pp. <fpage>115</fpage>, <year>Jul. 2022</year>. doi: <pub-id pub-id-type="doi">10.1186/s13244-022-01259-8</pub-id>; <pub-id pub-id-type="pmid">35900608</pub-id></mixed-citation></ref>
<ref id="ref-36"><label>[36]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Li</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Gu</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Wang</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Qin</surname></string-name>, and <string-name><given-names>J.</given-names> <surname>Wang</surname></string-name></person-group>, &#x201C;<article-title>BUSnet: A deep learning model of breast tumor lesion detection for ultrasound images</article-title>,&#x201D; <source>Front. Oncol.</source>, vol. <volume>12</volume>, pp. <fpage>900</fpage>, <year>Mar. 2022</year>. doi: <pub-id pub-id-type="doi">10.3389/fonc.2022.848271</pub-id>; <pub-id pub-id-type="pmid">35402269</pub-id></mixed-citation></ref>
<ref id="ref-37"><label>[37]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Jabeen</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>Breast cancer classification from ultrasound images using probability-based optimal deep learning feature fusion</article-title>,&#x201D; <source>Sens.</source>, vol. <volume>22</volume>, no. <issue>3</issue>, pp. <fpage>807</fpage>, <year>Jan. 2022</year>. doi: <pub-id pub-id-type="doi">10.3390/s22030807</pub-id>; <pub-id pub-id-type="pmid">35161552</pub-id></mixed-citation></ref>
<ref id="ref-38"><label>[38]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Chen</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Wu</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Pan</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Lian</surname></string-name>, and <string-name><given-names>Q.</given-names> <surname>Su</surname></string-name></person-group>, &#x201C;<article-title>Breast ultrasound image classification and physiological assessment based on GoogLeNet</article-title>,&#x201D; <source>J. Radiat. Res. Appl. Sci.</source>, vol. <volume>16</volume>, no. <issue>3</issue>, pp. <fpage>100628</fpage>, <year>Sept. 2023</year>. doi: <pub-id pub-id-type="doi">10.1016/j.jrras.2023.100628</pub-id>.</mixed-citation></ref>
<ref id="ref-39"><label>[39]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S. S.</given-names> <surname>Naveed</surname></string-name></person-group>, &#x201C;<article-title>Prediction of breast cancer through random forest</article-title>,&#x201D; <source>Curr. Med. Imaging</source>, vol. <volume>19</volume>, no. <issue>10</issue>, pp. <fpage>12</fpage>, <year>Nov. 2022</year>. doi: <pub-id pub-id-type="doi">10.2174/1573405618666220930150625</pub-id>; <pub-id pub-id-type="pmid">36200251</pub-id></mixed-citation></ref>
<ref id="ref-40"><label>[40]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Li</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Shi</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Chen</surname></string-name>, <string-name><given-names>Z.</given-names> <surname>Du</surname></string-name>, and <string-name><given-names>L.</given-names> <surname>Huang</surname></string-name></person-group>, &#x201C;<article-title>Self-attention random forest for breast cancer image classification</article-title>,&#x201D; <source>Front. Oncol.</source>, vol. <volume>13</volume>, pp. <fpage>101880</fpage>, <year>Feb. 2023</year>. doi: <pub-id pub-id-type="doi">10.3389/fonc.2023.1043463</pub-id>; <pub-id pub-id-type="pmid">36814814</pub-id></mixed-citation></ref>
<ref id="ref-41"><label>[41]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>W.</given-names> <surname>Ding</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Zhuang</surname></string-name>, <string-name><given-names>Z.</given-names> <surname>Zhuang</surname></string-name>, and <string-name><given-names>Z.</given-names> <surname>Gao</surname></string-name></person-group>, &#x201C;<article-title>Multi-view stereoscopic attention network for 3D tumor classification in automated breast ultrasound</article-title>,&#x201D; <source>Expert. Syst. Appl.</source>, vol. <volume>234</volume>, no. <issue>1</issue>, pp. <fpage>120969</fpage>, <year>Dec. 2023</year>. doi: <pub-id pub-id-type="doi">10.1016/j.eswa.2023.120969</pub-id>.</mixed-citation></ref>
<ref id="ref-42"><label>[42]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>D. A.</given-names> <surname>Dattatray</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Mehadi</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Vijay</surname></string-name>, and <string-name><given-names>R.</given-names> <surname>Chittibabu</surname></string-name></person-group>, &#x201C;<article-title>Deep learning-based feature fusion and transfer learning for approximating pIC value of COVID-19 medicine using drug discovery data</article-title>,&#x201D; <source>J. Mech. Med. Biol.,</source>World Scientific Publishing Co., pp. <fpage>2350100</fpage>, <year>2023</year>. doi: <pub-id pub-id-type="doi">10.1142/S0219519423501002</pub-id>.</mixed-citation></ref>
<ref id="ref-43"><label>[43]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Wei</surname></string-name> <etal>et al.</etal></person-group>, &#x201C;<article-title>A benign and malignant breast tumor classification method via efficiently combining texture and morphological features on ultrasound images</article-title>,&#x201D; <source>Comput. Math. Methods Med.</source>, vol. <volume>2020</volume>, pp. <fpage>1</fpage>&#x2013;<lpage>12</lpage>, <year>2020</year>. doi: <pub-id pub-id-type="doi">10.1155/2020/5894010</pub-id>; <pub-id pub-id-type="pmid">33062038</pub-id></mixed-citation></ref>
<ref id="ref-44"><label>[44]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>T. F.</given-names> <surname>Chan</surname></string-name> and <string-name><given-names>L. A.</given-names> <surname>Vese</surname></string-name></person-group>, &#x201C;<article-title>Active contours without edges</article-title>,&#x201D; <source>IEEE Trans. Image Process.</source>, vol. <volume>10</volume>, no. <issue>2</issue>, pp. <fpage>266</fpage>&#x2013;<lpage>277</lpage>, <year>Feb. 2001</year>. doi: <pub-id pub-id-type="doi">10.1109/83.902291</pub-id>; <pub-id pub-id-type="pmid">18249617</pub-id></mixed-citation></ref>
<ref id="ref-45"><label>[45]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Yu</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Chen</surname></string-name>, and <string-name><given-names>Y.</given-names> <surname>Chen</surname></string-name></person-group>, &#x201C;<article-title>Tumor segmentation in breast ultrasound image by means of res path combined with dense connection neural network</article-title>,&#x201D; <source>Diagnostics</source>, vol. <volume>11</volume>, no. <issue>9</issue>, pp. <fpage>1565</fpage>, <year>Aug. 2021</year>. doi: <pub-id pub-id-type="doi">10.3390/diagnostics11091565</pub-id>; <pub-id pub-id-type="pmid">34573907</pub-id></mixed-citation></ref>
<ref id="ref-46"><label>[46]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>P.</given-names> <surname>Wahdan</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Nagy</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Saad</surname></string-name>, and <string-name><given-names>A.</given-names> <surname>Shoukry</surname></string-name></person-group>, &#x201C;<article-title>Automated breast tumour detection in ultrasound images using support vector machine and ensemble classification</article-title>,&#x201D; <source>J. Biomed. Eng. Biosci.</source>, vol. <volume>3</volume>, pp. <fpage>4</fpage>&#x2013;<lpage>11</lpage>, <year>May 2016</year>. doi: <pub-id pub-id-type="doi">10.11159/jbeb.2016.002</pub-id>.</mixed-citation></ref>
<ref id="ref-47"><label>[47]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Naga Durga</surname></string-name> and <string-name><given-names>K. V.</given-names> <surname>Krishnam Raju</surname></string-name></person-group>, &#x201C;<article-title>An improved prediction of breast cancer using deep neural networks (DNN)</article-title>,&#x201D; <source>Int. J. Modern Trends Sci. Technol.</source>, vol. <volume>7</volume>, no. <issue>0706221</issue>, pp. <fpage>25</fpage>&#x2013;<lpage>30</lpage>, <year>Jul. 2021</year>. doi: <pub-id pub-id-type="doi">10.46501/IJMTST0707005</pub-id>.</mixed-citation></ref>
</ref-list>
</back></article>