<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "http://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="1.1">
<front>
<journal-meta>
<journal-id journal-id-type="pmc">CMC</journal-id>
<journal-id journal-id-type="nlm-ta">CMC</journal-id>
<journal-id journal-id-type="publisher-id">CMC</journal-id>
<journal-title-group>
<journal-title>Computers, Materials &#x0026; Continua</journal-title>
</journal-title-group>
<issn pub-type="epub">1546-2226</issn>
<issn pub-type="ppub">1546-2218</issn>
<publisher>
<publisher-name>Tech Science Press</publisher-name>
<publisher-loc>USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">28824</article-id>
<article-id pub-id-type="doi">10.32604/cmc.2023.028824</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Article</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Crops Leaf Diseases Recognition: A Framework of Optimum Deep Learning Features</article-title>
<alt-title alt-title-type="left-running-head">Crops Leaf Diseases Recognition: A Framework of Optimum Deep Learning Features</alt-title>
<alt-title alt-title-type="right-running-head">Crops Leaf Diseases Recognition: A Framework of Optimum Deep Learning Features</alt-title>
</title-group>
<contrib-group content-type="authors">
<contrib id="author-1" contrib-type="author">
<name name-style="western"><surname>Abbas</surname><given-names>Shafaq</given-names></name><xref ref-type="aff" rid="aff-1">1</xref></contrib>
<contrib id="author-2" contrib-type="author">
<name name-style="western"><surname>Khan</surname><given-names>Muhammad Attique</given-names></name><xref ref-type="aff" rid="aff-1">1</xref></contrib>
<contrib id="author-3" contrib-type="author">
<name name-style="western"><surname>Alhaisoni</surname><given-names>Majed</given-names></name><xref ref-type="aff" rid="aff-2">2</xref></contrib>
<contrib id="author-4" contrib-type="author">
<name name-style="western"><surname>Tariq</surname><given-names>Usman</given-names></name><xref ref-type="aff" rid="aff-3">3</xref></contrib>
<contrib id="author-5" contrib-type="author">
<name name-style="western"><surname>Armghan</surname><given-names>Ammar</given-names></name><xref ref-type="aff" rid="aff-4">4</xref></contrib>
<contrib id="author-6" contrib-type="author">
<name name-style="western"><surname>Alenezi</surname><given-names>Fayadh</given-names></name><xref ref-type="aff" rid="aff-4">4</xref></contrib>
<contrib id="author-7" contrib-type="author">
<name name-style="western"><surname>Majumdar</surname><given-names>Arnab</given-names></name><xref ref-type="aff" rid="aff-5">5</xref></contrib>
<contrib id="author-8" contrib-type="author" corresp="yes">
<name name-style="western"><surname>Thinnukool</surname><given-names>Orawit</given-names></name><xref ref-type="aff" rid="aff-6">6</xref><email>orawit.t@cmu.ac.th</email></contrib>
<aff id="aff-1"><label>1</label><institution>Department of Computer Science, HITEC University</institution>, <addr-line>Taxila</addr-line>, <country>Pakistan</country></aff>
<aff id="aff-2"><label>2</label><institution>Computer Sciences Department, College of Computer and Information Sciences</institution>, <addr-line>Princess Nourah bint Abdulrahman University, Riyadh 11671</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-3"><label>3</label><institution>College of Computer Engineering and Science, Prince Sattam Bin Abdulaziz University</institution>, <addr-line>Al-Kharaj, 11942</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-4"><label>4</label><institution>Department of Electrical Engineering, College of Engineering, Jouf University</institution>, <addr-line>Sakakah</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-5"><label>5</label><institution>Faculty of Engineering, Imperial College London</institution>, <addr-line>London, SW7 2AZ</addr-line>, <country>UK</country></aff>
<aff id="aff-6"><label>6</label><institution>College of Arts, Media, and Technology, Chiang Mai University</institution>, <addr-line>Chiang Mai, 50200</addr-line>, <country>Thailand</country></aff>
</contrib-group>
<author-notes>
<corresp id="cor1"><label>&#x002A;</label>Corresponding Author: Orawit Thinnukool. Email: <email>orawit.t@cmu.ac.th</email></corresp>
</author-notes>
<pub-date pub-type="epub" date-type="pub" iso-8601-date="2022-08-16"><day>16</day>
<month>08</month>
<year>2022</year></pub-date>
<volume>74</volume>
<issue>1</issue>
<fpage>1139</fpage>
<lpage>1159</lpage>
<history>
<date date-type="received"><day>18</day><month>2</month><year>2022</year></date>
<date date-type="accepted"><day>04</day><month>5</month><year>2022</year></date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2023 Abbas et al.</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Abbas et al.</copyright-holder>
<license xlink:href="https://creativecommons.org/licenses/by/4.0/">
<license-p>This work is licensed under a <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="TSP_CMC_28824.pdf"></self-uri>
<abstract>
<p>Manual diagnosis of crops diseases is not an easy process; thus, a computerized method is widely used. Over the past couple of years, advancements in the domain of machine learning, such as deep learning, have shown substantial success. However, they still faced some challenges such as similarity in disease symptoms and irrelevant features extraction. In this article, we proposed a new deep learning architecture with an optimization algorithm for cucumber and potato leaf diseases recognition. The proposed architecture consists of five steps. In the first step, data augmentation is performed to increase the number of training samples. In the second step, a pre-trained DarkNet19 deep model is opted and fine-tuned; the fine-tuned model is later trained through transfer learning. Deep features are extracted from the global pooling layer in the next step and refined using an Improved Cuckoo search algorithm. The best selected features are finally classified using machine learning classifiers such as SVM, to name a few, for the final classification results. The proposed architecture is tested using publicly available datasets&#x2013;Cucumber National Dataset and Plant Village. The proposed architecture achieved an accuracy of 100.0&#x0025;, 92.9&#x0025;, and 99.2&#x0025;, respectively. A comparison with recent techniques is also performed, revealing that the proposed method achieved improved accuracy while consuming less computational time.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>Crops diseases</kwd>
<kwd>preprocessing</kwd>
<kwd>convolutional neural network</kwd>
<kwd>features optimization</kwd>
<kwd>machine learning</kwd>
</kwd-group>
</article-meta>
</front>
<body>
<sec id="s1"><label>1</label><title>Introduction</title>
<p>Agriculture contributes significantly to economic growth and poverty reduction. This sector, which accounts for 26&#x0025; of total revenue, provides direct assistance to the country&#x2019;s citizens. Cotton, wheat, rice, sugar beets, fruits, and vegetables are among the most important agricultural crops [<xref ref-type="bibr" rid="ref-1">1</xref>]. Changes in agricultural production and quality have a significant financial impact on the country. As a result, detecting illnesses in plants at an early stage is critical, because crop output has a significant impact on them. Plant diseases reduce agricultural productivity and quality. Computer vision approaches can greatly aid in early stage disease diagnosis [<xref ref-type="bibr" rid="ref-2">2</xref>]. With advancements in the field of Computer vision (CV), there is a high chance of detecting and recognizing plant diseases [<xref ref-type="bibr" rid="ref-3">3</xref>]. Climate change is to blame for the apparent global spread of plant diseases. When new diseases emerge in different parts of the world, they are rarely discovered [<xref ref-type="bibr" rid="ref-4">4</xref>]. Plant diseases pose significant challenges to food security and early detection of infection symptoms. This is still a challenge in many parts of the world, resulting in a shortage of high operational and maintenance personnel [<xref ref-type="bibr" rid="ref-5">5</xref>]. Early detection of these plant diseases can aid in the prevention of national economic losses. As a result, it is critical to protect crops against diseases from the beginning [<xref ref-type="bibr" rid="ref-6">6</xref>]. In recent years, agricultural innovation has focused on increasing agricultural production supply while lowering costs and improving profitability. Manual disease detection can take a long time and may necessitate extra work. One of the most perplexing factors impeding plant magnification is disease assault. 
According to extensive agricultural research, plant disease-related issues may reduce the quality and quantity of agricultural goods. A machine learning methodology, rather than a manual method, can identify many infections more quickly [<xref ref-type="bibr" rid="ref-7">7</xref>]. Tomato, potato, and cucumber are among the most important vegetables in several nations, including India, Pakistan, and Algeria. However, illnesses have an adverse influence on the production of these crops, resulting in food scarcity. Because of the lack of specialist availability, the manual process of treating these crops&#x2019; leaf diseases is complex and time-consuming [<xref ref-type="bibr" rid="ref-8">8</xref>]. Plant disease detection using an automated technique is advantageous since it reduces the amount of supervision necessary on big local farms and identifies disease signs immediately, i.e., when they first appear on leaf tissue [<xref ref-type="bibr" rid="ref-9">9</xref>].</p>
<p>Lately, computer vision (CV) has attracted much interest in the domain of agriculture for plant diseases detection and classification at the early stage [<xref ref-type="bibr" rid="ref-10">10</xref>]. The manual processing of plant diseases is time consuming and requires an expert person. Therefore, a computerized method is widely required for diseases detection and classification [<xref ref-type="bibr" rid="ref-11">11</xref>]. A computerized method includes some important steps such as preprocessing of original images, detection of infected parts using image processing techniques, features extraction, reduction of redundant features, and classification using machine learning algorithms [<xref ref-type="bibr" rid="ref-12">12</xref>]. Preprocessing is an important step in agriculture for better visualization of the infected part of the crop leaf. The better contrast images helped in the important features extraction. Features are extracted through some important techniques such as shape and texture, to name a few. However, these traditional features extraction methods do not perform well once the number of training data increases. Recently, deep learning (DL) demonstrated outstanding performance in the agriculture domain for both disease detection and classification [<xref ref-type="bibr" rid="ref-13">13</xref>]. The performance of deep learning methods is better than the traditional techniques [<xref ref-type="bibr" rid="ref-14">14</xref>]. Convolutional neural network (CNN) is a type of deep learning that consists of some important layers such as input layer, convolutional layer, normalization and activation layers, fully connected layer, and finally a Softmax layer. Using a CNN model, features are extracted from the fully connected layers. However, sometimes redundant features are also extracted that mislead the classification process. 
Therefore, the researchers developed some features reduction techniques such as entropy based approach, Genetic Algorithm (GA), and named a few more [<xref ref-type="bibr" rid="ref-15">15</xref>]. Finally, the selected features are classified using machine learning classifiers such as support vector machine (SVM), K-Nearest Neighbor (KNN), and named a few more [<xref ref-type="bibr" rid="ref-16">16</xref>].</p>
<p>Many techniques have been introduced in the literature for crops leaf diseases using deep learning and features optimization. Ma&#x00A0;et&#x00A0;al.&#x00A0;[<xref ref-type="bibr" rid="ref-17">17</xref>] presented a projected deep convolutional neural network (DCNN) based method for recognition of cucumber leaf diseases such as anthracnose, downy mildew, powdery mildew, and target leaf spots. Data augmentation methods were used to reduce the likelihood of imbalanced datasets by the segmented symptom images. They used a total of 14,208 symptom images from augmented datasets for the experimental process and attained an accuracy of 93.4&#x0025;. Singh&#x00A0;et&#x00A0;al.&#x00A0;[<xref ref-type="bibr" rid="ref-18">18</xref>] presented a multi-class classification technique for tomato ripeness analysis. In the presented method, features are extracted from the images and reduced using Principal Component Analysis (PCA) that are later classified using machine learning classifiers such as SVM, to name a few. The experimental procedure was performed on 250 images and attained an accuracy of 90.80&#x0025; by employing multi-class SVM. Sravan&#x00A0;et&#x00A0;al.&#x00A0;[<xref ref-type="bibr" rid="ref-19">19</xref>] presented a CNN based method for crops leaf diseases recognition. In the presented method, ResNet50 model was employed for features extraction through transfer learning (TL). They collected 20,639 images from the database of Plant Village and attained 99.26&#x0025; accuracy. Chakraborty&#x00A0;et&#x00A0;al.&#x00A0;[<xref ref-type="bibr" rid="ref-20">20</xref>] presented a hybrid CNN based framework for potato leaf diseases recognition. They used four pre-trained CNN models named MobileNet, VGG16, ResNet, and VGG19 and trained on Plant Village dataset. The results were computed for each model and VGG16 gave the best result of 97.8&#x0025;. 
Khalifa&#x00A0;et&#x00A0;al.&#x00A0;[<xref ref-type="bibr" rid="ref-21">21</xref>] presented a 14-layered CNN architecture for potato leaf diseases recognition. They used 2 convolutional layers having different window sizes that are followed by two FC layers. The features are extracted from the convolutional layers for the final classification. They also employed a data augmentation step to increase the number of images for better testing accuracy. On the selected dataset, the presented method attained an accuracy of 98&#x0025;. Rashid et al. [<xref ref-type="bibr" rid="ref-22">22</xref>] presented a YOLOv5 and CNN based framework for potato leaf diseases recognition. They collected more than 4000 images for the experimental process and attained an accuracy of 99&#x0025;. Thangaraj&#x00A0;et&#x00A0;al.&#x00A0;[<xref ref-type="bibr" rid="ref-23">23</xref>] presented a deep transfer learning based architecture for tomato leaf diseases recognition. They used three optimization methods for the training of a CNN model such as Adam, stochastic gradient descent (SGD), and RMSprop. Based on the analysis, they concluded that RMSprop attained better accuracy.</p>
<p><bold>Motivation and Problem Statement:</bold> Manual inspection of crop leaf diseases is not an easy task. There are many manual techniques available but they are costly and time consuming. The diseases in the crops impact both the quality and quantity of foods. The use of computerized techniques is widely required for early detection and classification of crops diseases. Many computerized techniques have been introduced in the literature but they still face the following challenges: i) contrast and noise in the original images lead to the extraction of irrelevant and noisy features, with the outcome that recognition accuracy is degraded; ii) normally, the researchers extracted deep features from the fully connected layers but after the visualization process, some important features seem to be missing. Due to the missing features, incorrect prediction results are retrieved, and iii) several extracted features include some redundant information; therefore, this affects the classification accuracy and consumes more time during the training and testing process. In this paper, a deep learning and feature optimization-based architecture for crop leaf disease recognition is proposed. Our most significant contributions are as follows:
<list list-type="bullet">
<list-item><p>A hybridization-based contrast enhancement technique is proposed.</p></list-item>
<list-item><p>Tweaked the Darknet19 CNN model and trained it with transfer learning. Weights from 50&#x0025; of the layers are considered for training instead of 20&#x0025; in the learning process.</p></list-item>
<list-item><p>A new Cuckoo Search algorithm is introduced for selecting the best features. A fitness function and MSER loss are used to evaluate the selected features.</p></list-item>
</list></p>
<p>The remainder of this article is divided into the sections listed below. Section 2 discusses the datasets used in the experimental process in detail. The deep learning and optimization methodologies are presented in Section 3. Section 4 presents the findings and comparisons. Finally, the manuscript is concluded.</p>
</sec>
<sec id="s2"><label>2</label><title>Datasets</title>
<p>In this work, two publicly available datasets are utilized named&#x2013;China National Cucumber (CNC) dataset [<xref ref-type="bibr" rid="ref-24">24</xref>] and Plant Village [<xref ref-type="bibr" rid="ref-25">25</xref>]. The CNC dataset includes five cucumber diseases, as illustrated in <xref ref-type="fig" rid="fig-1">Fig. 1</xref>. The Plant Village dataset includes more than 30 disease classes but in this work, we only consider the Potato and Tomato crops. A few sample images are illustrated in <xref ref-type="fig" rid="fig-2">Fig. 2</xref>. Both datasets include RGB images having different dimensions. The details of diseased leaf images of each crop are presented in <xref ref-type="table" rid="table-1">Tab. 1</xref>.</p>
<fig id="fig-1"><label>Figure 1</label><caption><title>Sample images of cucumber leaf diseases</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-1.png"/></fig>
<fig id="fig-2"><label>Figure 2</label><caption><title>Sample images of Tomato leaf diseases</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-2.png"/></fig>
<table-wrap id="table-1"><label>Table 1</label><caption><title>Brief description of selected datasets</title></caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">Crop</th>
<th align="left">Diseases</th>
<th align="left">Total images</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">Potato</td>
<td align="left">Early blight</td>
<td align="left">1000</td>
</tr>
<tr>
<td/>
<td align="left">Late blight</td>
<td align="left">1000</td>
</tr>
<tr>
<td/>
<td align="left">Healthy</td>
<td align="left">152</td>
</tr>
<tr>
<td align="left">Cucumber</td>
<td align="left">Angular leaf spot</td>
<td align="left">944</td>
</tr>
<tr>
<td/>
<td align="left">Anthracnose</td>
<td align="left">979</td>
</tr>
<tr>
<td/>
<td align="left">Blight</td>
<td align="left">1020</td>
</tr>
<tr>
<td/>
<td align="left">Powdery mildew</td>
<td align="left">924</td>
</tr>
<tr>
<td/>
<td align="left">Downy mildew</td>
<td align="left">996</td>
</tr>
<tr>
<td align="left">Tomato</td>
<td align="left">Bacterial spot</td>
<td align="left">2,127</td>
</tr>
<tr>
<td/>
<td align="left">Early blight</td>
<td align="left">1000</td>
</tr>
<tr>
<td/>
<td align="left">Healthy</td>
<td align="left">1591</td>
</tr>
<tr>
<td/>
<td align="left">Late blight</td>
<td align="left">1,091</td>
</tr>
<tr>
<td/>
<td align="left">Leaf mold</td>
<td align="left">952</td>
</tr>
<tr>
<td/>
<td align="left">Septoria leaf spot</td>
<td align="left">1,771</td>
</tr>
<tr>
<td/>
<td align="left">Two spotted spider mites</td>
<td align="left">1,676</td>
</tr>
<tr>
<td/>
<td align="left">Mosaic virus</td>
<td align="left">373</td>
</tr>
<tr>
<td/>
<td align="left">Yellow leaf curl virus</td>
<td align="left">5,357</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3"><label>3</label><title>Proposed Methodology</title>
<p>The proposed crops leaf diseases recognition framework is illustrated in <xref ref-type="fig" rid="fig-3">Fig. 3</xref>. The proposed framework includes the following important steps. In the first step, data augmentation is performed to increase the number of training samples. In the second step, a pre-trained DarkNet19 deep model is opted and fine-tuned. The fine-tuned model is trained using transfer learning in the third step and deep features are extracted from the global pooling layer. In the fourth step, an Improved Cuckoo search algorithm is applied to the extracted deep feature vector to select the best features. Finally, the best selected features are classified using machine learning classifiers in the fifth step. The explanation of each step is given below.</p>
<sec id="s3_1"><label>3.1</label><title>Contrast Enhancement and Data Augmentation</title>
<fig id="fig-3"><label>Figure 3</label><caption><title>Proposed flow of crops leaf diseases recognition using deep learning and features optimization</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-3.png"/></fig>
<p>Enhancement of an image has been an important research area in image processing for the last decade. Many techniques have been introduced in the literature for contrast enhancement such as histogram equalization and filtering, to name a few. The main purpose of this step is to improve the contrast of the original image for better visualization. In the domain of agriculture, the core idea behind the use of contrast enhancement is to highlight the infected regions that are later utilized for accurate features extraction.</p>
<p>In this article, we implemented a hybrid approach for contrast enhancement. Three steps based hybrid approach is implemented: i) image is processed through 3D box filter of filter size <inline-formula id="ieqn-1"><mml:math id="mml-ieqn-1"><mml:mn>3</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mn>3</mml:mn></mml:math></inline-formula>; ii) top hat and bottom hat filtering applied to increase the local contrast of infected region, and iii) a CNN model is trained named VGG19 and applied on each image for final refinement. Mathematically, this process is defined through <xref ref-type="disp-formula" rid="eqn-1">Eqs. (1)</xref>&#x2013;<xref ref-type="disp-formula" rid="eqn-5">(5)</xref>.
<disp-formula id="eqn-1"><label>(1)</label><mml:math id="mml-eqn-1" display="block"><mml:mi>B</mml:mi><mml:mi>F</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>B</mml:mi><mml:mi>X</mml:mi><mml:mi>F</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>I</mml:mi><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mrow><mml:mo>[</mml:mo><mml:mi>m</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi><mml:mo>]</mml:mo></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula>
<disp-formula id="eqn-2"><label>(2)</label><mml:math id="mml-eqn-2" display="block"><mml:mi>T</mml:mi><mml:mi>h</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>T</mml:mi><mml:mi>o</mml:mi><mml:mi>h</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>B</mml:mi><mml:mi>F</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula>
<disp-formula id="eqn-3"><label>(3)</label><mml:math id="mml-eqn-3" display="block"><mml:mi>B</mml:mi><mml:mi>h</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>B</mml:mi><mml:mi>o</mml:mi><mml:mi>h</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>B</mml:mi><mml:mi>F</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula>
<disp-formula id="eqn-4"><label>(4)</label><mml:math id="mml-eqn-4" display="block"><mml:mi>T</mml:mi><mml:mi>B</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi>T</mml:mi><mml:mi>h</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>+</mml:mo><mml:mi>B</mml:mi><mml:mi>h</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>B</mml:mi><mml:mi>F</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula>
<disp-formula id="eqn-5"><label>(5)</label><mml:math id="mml-eqn-5" display="block"><mml:mi>C</mml:mi><mml:mi>F</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mi>&#x2127;</mml:mi></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mover><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:mo>&#x007E;</mml:mo></mml:mover></mml:mrow><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mi>T</mml:mi><mml:mi>B</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula>
where, <inline-formula id="ieqn-2"><mml:math id="mml-ieqn-2"><mml:mi>B</mml:mi><mml:mi>F</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> is box filtered image, <inline-formula id="ieqn-3"><mml:math id="mml-ieqn-3"><mml:mi>B</mml:mi><mml:mi>X</mml:mi><mml:mi>F</mml:mi></mml:math></inline-formula> is box filter function, <inline-formula id="ieqn-4"><mml:math id="mml-ieqn-4"><mml:mrow><mml:mo>[</mml:mo><mml:mi>m</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> represents the filter size, <inline-formula id="ieqn-5"><mml:math id="mml-ieqn-5"><mml:mi>T</mml:mi><mml:mi>h</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> represents the top hat filtered pixels, <inline-formula id="ieqn-6"><mml:math id="mml-ieqn-6"><mml:mi>B</mml:mi><mml:mi>h</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> represents the bottom hat filtered image, <inline-formula id="ieqn-7"><mml:math id="mml-ieqn-7"><mml:mi>T</mml:mi><mml:mi>o</mml:mi><mml:mi>h</mml:mi></mml:math></inline-formula> is top-hat function, <inline-formula id="ieqn-8"><mml:math id="mml-ieqn-8"><mml:mi>B</mml:mi><mml:mi>o</mml:mi><mml:mi>h</mml:mi></mml:math></inline-formula> is bottom hat function, <inline-formula id="ieqn-9"><mml:math id="mml-ieqn-9"><mml:mi>s</mml:mi><mml:mi>e</mml:mi></mml:math></inline-formula> is structuring element, <inline-formula id="ieqn-10"><mml:math id="mml-ieqn-10"><mml:mi>T</mml:mi><mml:mi>B</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> is top-bottom filtered image, <inline-formula id="ieqn-11"><mml:math 
id="mml-ieqn-11"><mml:mi>C</mml:mi><mml:mi>F</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> represents the final image, <inline-formula id="ieqn-12"><mml:math id="mml-ieqn-12"><mml:mrow><mml:mi>&#x2127;</mml:mi></mml:mrow></mml:math></inline-formula>  represents the CNN operation, and <inline-formula id="ieqn-13"><mml:math id="mml-ieqn-13"><mml:mrow><mml:mover><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:mo>&#x007E;</mml:mo></mml:mover></mml:mrow></mml:math></inline-formula> is trained CNN model.</p>
<p>After that, we applied a data augmentation step to increase the number of images of each dataset for the sake of better training of a CNN model. Through this step, the overfitting challenge is efficiently controlled. The process modifies the actual images so that the new images have the same properties as the originals. The benefit of data augmentation is that it allows a model to be trained on a single image in several directions. Three well known operations such as left to right flip (LR), vertical flip (UD), and rotation by 90 degrees are applied to each image.</p>
<p>These above-mentioned operations are applied to the selected crops such as Tomato, Cucumber, and Potato. Each crop originally consists of 15,938 images, 4863 images, and 2152 images, respectively. As given in <xref ref-type="table" rid="table-1">Tab. 1</xref>, each class is imbalanced; therefore, we applied the three aforementioned operations. After these operations, the images of each tomato disease class increased to 6000, each cucumber class reached 2000, and each potato class reached 4000, respectively. This augmented dataset is later utilized for the training of a CNN model. A few sample images of data augmentation operations are illustrated in <xref ref-type="fig" rid="fig-4">Fig. 4</xref>.</p>
<fig id="fig-4"><label>Figure 4</label><caption><title>Visual samples of data augmentation process</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-4.png"/></fig>
</sec>
<sec id="s3_2"><label>3.2</label><title>Convolutional Neural Networks (CNN)</title>
<p>Convolutional neural network (CNN) is powerful technique in machine learning for features extraction and image classification. A CNN model includes several layers such as convolutional, pooling, ReLu activation, normalization, fully connected, and softmax. Convolutional layer is the first layer known as feature extracted layer. It has finite number of filters. This layer joins a bunch of <inline-formula id="ieqn-14"><mml:math id="mml-ieqn-14"><mml:msub><mml:mi>M</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> filters to a bunch of <inline-formula id="ieqn-15"><mml:math id="mml-ieqn-15"><mml:msub><mml:mi>T</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> channels and sizes. <inline-formula id="ieqn-16"><mml:math id="mml-ieqn-16"><mml:msub><mml:mi>A</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub><mml:mo>&#x00D7;</mml:mo><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> in differences to a small one&#x2019;s of <inline-formula id="ieqn-17"><mml:math id="mml-ieqn-17"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>c</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> images with <inline-formula id="ieqn-18"><mml:math id="mml-ieqn-18"><mml:msub><mml:mi>T</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> channel size <inline-formula id="ieqn-19"><mml:math id="mml-ieqn-19"><mml:mrow><mml:mtext mathvariant="italic">Height</mml:mtext></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mrow><mml:mtext mathvariant="italic">Width</mml:mtext></mml:mrow></mml:math></inline-formula>. 
The channel components are indicated by <inline-formula id="ieqn-20"><mml:math id="mml-ieqn-20"><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mi>w</mml:mi><mml:mo>,</mml:mo><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>,</mml:mo><mml:mi>c</mml:mi><mml:mspace width="thickmathspace" /></mml:mrow></mml:msub></mml:math></inline-formula> and image components signified by <inline-formula id="ieqn-21"><mml:math id="mml-ieqn-21"><mml:msub><mml:mi>G</mml:mi><mml:mrow><mml:mi>g</mml:mi><mml:mo>,</mml:mo><mml:mi>h</mml:mi><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mi>i</mml:mi><mml:mspace width="thickmathspace" /></mml:mrow></mml:msub></mml:math></inline-formula> at that point the detailing of the convolutional operation is characterized as follows:
<disp-formula id="eqn-6"><label>(6)</label><mml:math id="mml-eqn-6" display="block"><mml:mi>Y</mml:mi><mml:msub><mml:mi>i</mml:mi><mml:mrow><mml:mi>g</mml:mi><mml:mo>,</mml:mo><mml:mi>w</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msubsup><mml:mrow><mml:mo>&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msubsup><mml:msubsup><mml:mrow><mml:mo>&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>p</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>R</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msubsup><mml:msubsup><mml:mrow><mml:mo>&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>q</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>S</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msubsup><mml:msub><mml:mi>G</mml:mi><mml:mrow><mml:mi>g</mml:mi><mml:mo>,</mml:mo><mml:mi>h</mml:mi><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>i</mml:mi><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mi>i</mml:mi><mml:mo>+</mml:mo><mml:mi>y</mml:mi><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mi>i</mml:mi><mml:mo>+</mml:mo><mml:mi>z</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mi>w</mml:mi><mml:mo>,</mml:mo><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mi>b</mml:mi><mml:mo>,</mml:mo><mml:mi>c</mml:mi></mml:mrow></mml:msub></mml:math></disp-formula></p>
<p>A full image/filter combination&#x2019;s output can be represented as:
<disp-formula id="eqn-7"><label>(7)</label><mml:math id="mml-eqn-7" display="block"><mml:mi>Y</mml:mi><mml:msub><mml:mi>i</mml:mi><mml:mrow><mml:mi>g</mml:mi><mml:mo>,</mml:mo><mml:mi>w</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msubsup><mml:mrow><mml:mo>&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>o</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msubsup><mml:msub><mml:mi>G</mml:mi><mml:mrow><mml:mi>g</mml:mi><mml:mo>,</mml:mo><mml:mi>h</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2217;</mml:mo><mml:mspace width="thickmathspace" /><mml:msub><mml:mi>C</mml:mi><mml:mrow><mml:mi>w</mml:mi><mml:mo>,</mml:mo><mml:mi>a</mml:mi></mml:mrow></mml:msub></mml:math></disp-formula>where the two-dimensional relationship is denoted by &#x002A;. ReLU is the activation layer stands for Rectified linear unit. ReLU&#x2019;s goal is to increase the CNN&#x2019;s nonlinear behavior. It mark zero to all negative weight values and for the next phase, the subsequent positive weights will indeed be processed in the same way.
<disp-formula id="eqn-8"><label>(8)</label><mml:math id="mml-eqn-8" display="block"><mml:mi>R</mml:mi><mml:mi>e</mml:mi><mml:mi>L</mml:mi><mml:mi>U</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>A</mml:mi><mml:mrow><mml:mi>r</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>x</mml:mi><mml:mrow><mml:mo>{</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mi>a</mml:mi><mml:mi>r</mml:mi><mml:mo>}</mml:mo></mml:mrow></mml:math></disp-formula>where the <inline-formula id="ieqn-22"><mml:math id="mml-ieqn-22"><mml:mi>a</mml:mi><mml:mi>r</mml:mi></mml:math></inline-formula> is input value in ReLU, and the output is <inline-formula id="ieqn-23"><mml:math id="mml-ieqn-23"><mml:msub><mml:mi>A</mml:mi><mml:mrow><mml:mi>r</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>. The following equation was used to update the weights using the updating rule.
<disp-formula id="eqn-9"><label>(9)</label><mml:math id="mml-eqn-9" display="block"><mml:mi>W</mml:mi><mml:mi>e</mml:mi><mml:mo>=</mml:mo><mml:mi>w</mml:mi><mml:mi>e</mml:mi><mml:mo>+</mml:mo><mml:mi>&#x03B4;</mml:mi><mml:mfrac><mml:mrow><mml:mi>&#x03B8;</mml:mi><mml:msup><mml:mi>F</mml:mi><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mrow><mml:mi>&#x03B8;</mml:mi><mml:mi>w</mml:mi><mml:mi>e</mml:mi></mml:mrow></mml:mfrac></mml:math></disp-formula></p>
<p>Update weight is denoted by <inline-formula id="ieqn-24"><mml:math id="mml-ieqn-24"><mml:mi>W</mml:mi><mml:mi>e</mml:mi></mml:math></inline-formula>, the actual value is denoted by <inline-formula id="ieqn-25"><mml:math id="mml-ieqn-25"><mml:mi>w</mml:mi><mml:mi>e</mml:mi></mml:math></inline-formula>, and rate of learning presented by <inline-formula id="ieqn-26"><mml:math id="mml-ieqn-26"><mml:mi>&#x03B8;</mml:mi></mml:math></inline-formula>. The supremacy of ReLU is that it speedup training of dataset and sparsity in hidden units. In pooling layer, there are down sampling operations performed which reduce the dimensions of the network. In any case, the max pooling may be a broad and promising strategy within the literature since it gives noteworthy comes about by down sampling input estimate by 75&#x0025;. It also reduces computational work of network. There are the three pooling operations which are normally used such as maximum pooling, average pooling, and minimum pooling. Mathematically, operations are expressed in terms:
<disp-formula id="eqn-10"><label>(10)</label><mml:math id="mml-eqn-10" display="block"><mml:mi>P</mml:mi><mml:msup><mml:mi>l</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:msup><mml:mo>=</mml:mo><mml:mrow><mml:mtext mathvariant="italic">Maxpool</mml:mtext></mml:mrow><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mi>P</mml:mi><mml:mi>l</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup></mml:math></disp-formula></p>
<p>Maximum pooling represented with <inline-formula id="ieqn-27"><mml:math id="mml-ieqn-27"><mml:mrow><mml:mtext mathvariant="italic">Maxpool</mml:mtext></mml:mrow></mml:math></inline-formula> as well <inline-formula id="ieqn-28"><mml:math id="mml-ieqn-28"><mml:mi>i</mml:mi><mml:mi>n</mml:mi></mml:math></inline-formula> represented output. In the fully connected layer <inline-formula id="ieqn-29"><mml:math id="mml-ieqn-29"><mml:mi>f</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mrow><mml:mtext mathvariant="italic">fully</mml:mtext></mml:mrow></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mi>f</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mi>C</mml:mi><mml:mi>N</mml:mi><mml:mi>N</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, CNN&#x2019;s extracted characteristics were merged into 1D. The classification outcomes was then generated using the softmax function, as represented by the given equations:
<disp-formula id="eqn-11"><label>(11)</label><mml:math id="mml-eqn-11" display="block"><mml:mrow><mml:mrow><mml:mi mathvariant="script">V</mml:mi></mml:mrow></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mtext mathvariant="italic">Softmax</mml:mtext></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mi>f</mml:mi><mml:msub><mml:mi>u</mml:mi><mml:mrow><mml:mrow><mml:mtext mathvariant="italic">fully</mml:mtext></mml:mrow></mml:mrow></mml:msub><mml:mo>&#x2217;</mml:mo><mml:msub><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mi>&#x03C5;</mml:mi></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mrow><mml:mi>&#x03C5;</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula>where, the output represented by <inline-formula id="ieqn-30"><mml:math id="mml-ieqn-30"><mml:mrow><mml:mrow><mml:mi mathvariant="script">V</mml:mi></mml:mrow></mml:mrow></mml:math></inline-formula>, output bias is represented by <inline-formula id="ieqn-31"><mml:math id="mml-ieqn-31"><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mrow><mml:mi>&#x03C5;</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> and weight matrix represented by <inline-formula id="ieqn-32"><mml:math id="mml-ieqn-32"><mml:msub><mml:mi>&#x03C3;</mml:mi><mml:mrow><mml:mi>&#x03C5;</mml:mi></mml:mrow></mml:msub><mml:mo>.</mml:mo></mml:math></inline-formula> The FC layer works with a flattening input, which means that all neurons are connected to one another.</p>
</sec>
<sec id="s3_3"><label>3.3</label><title>Transfer Learning</title>
<p>In transfer learning, a model created for one problem is repurposed for a different problem based on a set of requirements. It allows us to use knowledge from a previously trained model to train a newer model. It is the most frequent strategy in Computer Vision, where models are employed as a starting point for solving other problems in order to consume less time. Mathematically, the TL is formulated as follows:</p>
<p>A domain <inline-formula id="ieqn-33"><mml:math id="mml-ieqn-33"><mml:mi>d</mml:mi><mml:mi>m</mml:mi><mml:mo>=</mml:mo><mml:mspace width="thickmathspace" /><mml:mrow><mml:mo>{</mml:mo><mml:mi>H</mml:mi><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mi>p</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>h</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>}</mml:mo></mml:mrow></mml:math></inline-formula> is defined as a feature space <inline-formula id="ieqn-34"><mml:math id="mml-ieqn-34"><mml:mi>&#x03BE;</mml:mi></mml:math></inline-formula>, and a distribution of marginal probabilities <inline-formula id="ieqn-35"><mml:math id="mml-ieqn-35"><mml:mi>g</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>h</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula>, where <inline-formula id="ieqn-36"><mml:math id="mml-ieqn-36"><mml:mi>h</mml:mi><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mi>h</mml:mi><mml:mn>1</mml:mn><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mi>h</mml:mi><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mi>h</mml:mi><mml:mn>3</mml:mn><mml:mo>,</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mi>h</mml:mi><mml:mi>n</mml:mi><mml:mo>}</mml:mo></mml:mrow></mml:math></inline-formula> &#x2208; <inline-formula id="ieqn-37"><mml:math id="mml-ieqn-37"><mml:mi>&#x03BE;</mml:mi></mml:math></inline-formula>. 
For two different domains, the marginal probabilities will be <inline-formula id="ieqn-38"><mml:math id="mml-ieqn-38"><mml:mrow><mml:mo>(</mml:mo><mml:mi>p</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>&#x03BE;</mml:mi><mml:mrow><mml:mi>p</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:mspace width="thickmathspace" /><mml:mo>&#x2260;</mml:mo><mml:mspace width="thickmathspace" /><mml:mi>p</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>&#x03BE;</mml:mi><mml:mrow><mml:mi>q</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:mspace width="thickmathspace" /><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula>, where the feature space will be <inline-formula id="ieqn-39"><mml:math id="mml-ieqn-39"><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>&#x03BE;</mml:mi><mml:mrow><mml:mi>p</mml:mi><mml:mspace width="thickmathspace" /></mml:mrow></mml:msub><mml:mo>&#x2260;</mml:mo><mml:mspace width="thickmathspace" /><mml:msub><mml:mi>&#x03BE;</mml:mi><mml:mrow><mml:mi>q</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula>. 
Given a source domain <inline-formula id="ieqn-40"><mml:math id="mml-ieqn-40"><mml:mi>d</mml:mi><mml:msub><mml:mi>m</mml:mi><mml:mrow><mml:mi>s</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> as well learned task <inline-formula id="ieqn-41"><mml:math id="mml-ieqn-41"><mml:mi>T</mml:mi><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, a target domain <inline-formula id="ieqn-42"><mml:math id="mml-ieqn-42"><mml:mi>d</mml:mi><mml:msub><mml:mi>m</mml:mi><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> and learning task <inline-formula id="ieqn-43"><mml:math id="mml-ieqn-43"><mml:mi>T</mml:mi><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, transfer learning seeks to assist inside learning of desired predictor function <inline-formula id="ieqn-44"><mml:math id="mml-ieqn-44"><mml:mspace width="thickmathspace" /><mml:mi>f</mml:mi><mml:msub><mml:mi>n</mml:mi><mml:mrow><mml:mi>T</mml:mi><mml:mi>r</mml:mi><mml:mspace width="thickmathspace" /></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mo>.</mml:mo><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> in <inline-formula id="ieqn-45"><mml:math id="mml-ieqn-45"><mml:mi>T</mml:mi><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> using information from the source area <inline-formula id="ieqn-46"><mml:math id="mml-ieqn-46"><mml:mi>d</mml:mi><mml:msub><mml:mi>m</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>o</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> and <inline-formula id="ieqn-47"><mml:math id="mml-ieqn-47"><mml:mspace width="thickmathspace" /><mml:mi>T</mml:mi><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>, where <inline-formula id="ieqn-48"><mml:math 
id="mml-ieqn-48"><mml:mi>d</mml:mi><mml:msub><mml:mi>m</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>o</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2260;</mml:mo><mml:mi>d</mml:mi><mml:msub><mml:mi>m</mml:mi><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> or <inline-formula id="ieqn-49"><mml:math id="mml-ieqn-49"><mml:mi>T</mml:mi><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2260;</mml:mo><mml:mi>d</mml:mi><mml:msub><mml:mi>m</mml:mi><mml:mrow><mml:mi>T</mml:mi><mml:mi>r</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>.</p>
<p>Visually, the above process is presented in <xref ref-type="fig" rid="fig-5">Fig. 5</xref>. In this figure, it is described that the knowledge of the original pre-trained deep model is transferred to the new target model. The original pre-trained deep model consists of 1000 object classes, whereas the target model includes only crop disease classes. During the training of the new model, the following hyperparameters are used: the learning rate is 0.05, the mini-batch size is 64, the total number of epochs is 100, and the learning method is ADAM.</p>
<fig id="fig-5"><label>Figure 5</label><caption><title>Process of transfer learning for crops leaf diseases</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-5.png"/></fig>
</sec>
<sec id="s3_4"><label>3.4</label><title>Fine-Tuned Darknet 19 Deep Features</title>
<p>As a feature extractor, the DarkNET-19 pre-trained CNN model is utilized in this work. Darknet-19 is the backbone of YOLOv2. To create estimates, system which was uses before <inline-formula id="ieqn-50"><mml:math id="mml-ieqn-50"><mml:mn>1</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mn>1</mml:mn></mml:math></inline-formula> scrubbers for condense the feature maps among <inline-formula id="ieqn-51"><mml:math id="mml-ieqn-51"><mml:mn>3</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mn>3</mml:mn><mml:mspace width="thickmathspace" /></mml:math></inline-formula> convolutions as well globally averaged pooling. Darknet19 is made up of 19 convolutional and 5 max pooling layers as well 1 fully connected and soft max layer. Originally, this model was trained on ImageNet dataset that have 1000 object classes. This network accepted an input of dimension <inline-formula id="ieqn-52"><mml:math id="mml-ieqn-52"><mml:mspace width="thickmathspace" /><mml:mn>256</mml:mn><mml:mspace width="thickmathspace" /><mml:mi>b</mml:mi><mml:mi>y</mml:mi><mml:mspace width="thickmathspace" /><mml:mn>256.</mml:mn><mml:mspace width="thickmathspace" /></mml:math></inline-formula> Original architecture of DarkNet19 is illustrated in <xref ref-type="fig" rid="fig-6">Fig. 6</xref>. The main motivation behind the choice of this model in this work is better performance on ImageNet dataset in terms of accuracy and time. In the fine-tuning process, we removed the last layer (fully connected) and added a new layer (new_FC). After that the, defined the hyper parameters and trained on selected crops leaf datasets using TL. Features are extracted from the new trained CNN model. For features extraction, sigmoid activation function is applied on global average pooling layer and obtained a feature vector of dimension <inline-formula id="ieqn-53"><mml:math id="mml-ieqn-53"><mml:mi>N</mml:mi><mml:mo>&#x00D7;</mml:mo><mml:mn>1026</mml:mn></mml:math></inline-formula>. 
This extracted feature vector is refined using a hybrid optimization algorithm named&#x2013;Hybrid Cuckoo Newton Raphson Optimization algorithm.</p>
<fig id="fig-6"><label>Figure 6</label><caption><title>Original architecture of the DarkNet19 model</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-6.png"/></fig>
</sec>
<sec id="s3_5"><label>3.5</label><title>Hybrid Improved Cuckoo Newton Raphson Optimization</title>
<p>In this work, we propose a Hybrid Improved Cuckoo Newton Raphson Optimization (HICNRO) algorithm for best feature selection. The purpose of this algorithm is to minimize the redundancy in the features and to increase the accuracy. Moreover, the computational time can be reduced using HICNRO. Originally, the cuckoo search algorithm was proposed by Gandomi&#x00A0;et&#x00A0;al.&#x00A0;[<xref ref-type="bibr" rid="ref-26">26</xref>] in 2013. However, the convergence rate of this algorithm is not very fast, which is improved in this work using a Newton Raphson formulation.</p>
<p>Consider, the bird nests count is <inline-formula id="ieqn-54"><mml:math id="mml-ieqn-54"><mml:mi>c</mml:mi></mml:math></inline-formula>, current iterations count is <inline-formula id="ieqn-55"><mml:math id="mml-ieqn-55"><mml:mi>T</mml:mi></mml:math></inline-formula>, and the maximum number of iterations are <inline-formula id="ieqn-56"><mml:math id="mml-ieqn-56"><mml:mi>t</mml:mi></mml:math></inline-formula>. The initial position vector <inline-formula id="ieqn-57"><mml:math id="mml-ieqn-57"><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> of the bird&#x2019;s nest <inline-formula id="ieqn-58"><mml:math id="mml-ieqn-58"><mml:mrow><mml:mo>(</mml:mo><mml:mn>1</mml:mn><mml:mo>&#x2264;</mml:mo><mml:mi>i</mml:mi><mml:mo>&#x2264;</mml:mo><mml:mi>c</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> is defined as <inline-formula id="ieqn-59"><mml:math id="mml-ieqn-59"><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>I</mml:mi><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mo>.</mml:mo><mml:mo>.</mml:mo><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /></mml:mrow></mml:msub><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>N</mml:mi></mml:mrow></mml:msub><mml:mo>}</mml:mo></mml:mrow></mml:math></inline-formula>. The bird continues explore the journey through <italic>c</italic> nests in an n-dimensional environment and the place of the cuckoo bird&#x2019;s current nest presents a fresh technique to solving this problem. The goal of the optimization process is to continuously replace prior bad solutions with new ones and it depends on two connections for seek: stochastic walk and Levy flight. 
The route of search is defined as follows:
<disp-formula id="eqn-12"><label>(12)</label><mml:math id="mml-eqn-12" display="block"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi></mml:mrow><mml:mo>&#x2295;</mml:mo><mml:mrow><mml:mtext>Levy</mml:mtext></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi mathvariant="normal">&#x03BB;</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>The position vectors of the bird&#x2019;s nest i at the <inline-formula id="ieqn-60"><mml:math id="mml-ieqn-60"><mml:mi>T</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>T</mml:mi><mml:mi>h</mml:mi><mml:mspace width="thickmathspace" /><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mspace width="thickmathspace" /><mml:mrow><mml:mo>(</mml:mo><mml:mi>T</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>T</mml:mi><mml:mi>h</mml:mi></mml:math></inline-formula> iterations, &#x03B1; is the step size of control factor; <inline-formula id="ieqn-61"><mml:math id="mml-ieqn-61"><mml:mo>&#x2295;</mml:mo></mml:math></inline-formula> is the multiplication from one point to another, and stochastic seek route is represented by <inline-formula id="ieqn-62"><mml:math id="mml-ieqn-62"><mml:mrow><mml:mtext>Levy</mml:mtext></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula>. The relationship among <italic>T</italic> follows the <inline-formula id="ieqn-63"><mml:math id="mml-ieqn-63"><mml:mrow><mml:mtext>Levy</mml:mtext></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula> distribution as follows:
<disp-formula id="eqn-13"><label>(13)</label><mml:math id="mml-eqn-13" display="block"><mml:mi>L</mml:mi><mml:mi>e</mml:mi><mml:mi>v</mml:mi><mml:mi>y</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>&#x03BB;</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x223C;</mml:mo><mml:mi>&#x03BC;</mml:mi><mml:mo>=</mml:mo><mml:msup><mml:mi>T</mml:mi><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>&#x03BB;</mml:mi></mml:mrow></mml:msup><mml:mspace width="thickmathspace" /><mml:mo stretchy="false">(</mml:mo><mml:mn>1</mml:mn><mml:mo>&#x003C;</mml:mo><mml:mi>&#x03BB;</mml:mi><mml:mo>&#x2264;</mml:mo><mml:mn>3</mml:mn><mml:mo stretchy="false">)</mml:mo></mml:math></disp-formula></p>
<p>The step size factor is calculated using the following equation:
<disp-formula id="eqn-14"><label>(14)</label><mml:math id="mml-eqn-14" display="block"><mml:msub><mml:mi>l</mml:mi><mml:mrow><mml:mi>&#x03F5;</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>Z</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mtable columnalign="left left" rowspacing=".2em" columnspacing="1em" displaystyle="false"><mml:mtr><mml:mtd><mml:mn>0</mml:mn><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thickmathspace" /><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow><mml:mi>Z</mml:mi><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow><mml:mo>&#x2264;</mml:mo><mml:mspace width="thickmathspace" /><mml:mi>&#x03F5;</mml:mi><mml:mo>;</mml:mo></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow><mml:mi>Z</mml:mi><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>&#x03F5;</mml:mi><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mrow><mml:mtext mathvariant="italic">otherwise</mml:mtext></mml:mrow><mml:mo>.</mml:mo><mml:mspace width="thickmathspace" /></mml:mtd></mml:mtr></mml:mtable><mml:mo fence="true" stretchy="true" 
symmetric="true"></mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>Based on <inline-formula id="ieqn-64"><mml:math id="mml-ieqn-64"><mml:msub><mml:mi>l</mml:mi><mml:mrow><mml:mi>&#x03F5;</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:mi>Z</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula>, the present optimal solution is computed as:
<disp-formula id="eqn-15"><label>(15)</label><mml:math id="mml-eqn-15" display="block"><mml:mi>&#x03B1;</mml:mi><mml:mo>=</mml:mo><mml:msub><mml:mi>&#x03B1;</mml:mi><mml:mrow><mml:mn>0</mml:mn></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:mspace width="thickmathspace" /><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula>
where <inline-formula id="ieqn-65"><mml:math id="mml-ieqn-65"><mml:msub><mml:mi>&#x03B1;</mml:mi><mml:mrow><mml:mn>0</mml:mn></mml:mrow></mml:msub><mml:mspace width="thickmathspace" /></mml:math></inline-formula> is constant, initialized as 0.02, <inline-formula id="ieqn-66"><mml:math id="mml-ieqn-66"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup></mml:math></inline-formula> is current feature <inline-formula id="ieqn-67"><mml:math id="mml-ieqn-67"><mml:mo>&#x2208;</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup></mml:math></inline-formula> and <inline-formula id="ieqn-68"><mml:math id="mml-ieqn-68"><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> is the current best solution. To calculate the stochastic numbers, the following formulation is defined:
<disp-formula id="eqn-16"><label>(16)</label><mml:math id="mml-eqn-16" display="block"><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>Y</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mspace width="thickmathspace" /></mml:mrow></mml:msub><mml:mo>&#x2264;</mml:mo><mml:mrow><mml:mi mathvariant="normal">&#x03F5;</mml:mi></mml:mrow><mml:mo>+</mml:mo><mml:msub><mml:mi>&#x03BE;</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:math></disp-formula>
<disp-formula id="eqn-17"><label>(17)</label><mml:math id="mml-eqn-17" display="block"><mml:mi>L</mml:mi><mml:mi>e</mml:mi><mml:mi>v</mml:mi><mml:mi>y</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>&#x03BB;</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x223C;</mml:mo><mml:mspace width="thickmathspace" /><mml:mfrac><mml:mrow><mml:mi>&#x03C6;</mml:mi><mml:mi>&#x03BC;</mml:mi></mml:mrow><mml:mrow><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow><mml:mi>v</mml:mi><mml:msup><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mi>&#x03B2;</mml:mi></mml:mrow></mml:mfrac></mml:mrow></mml:msup></mml:mrow></mml:mfrac></mml:math></disp-formula>
<disp-formula id="eqn-18"><label>(18)</label><mml:math id="mml-eqn-18" display="block"><mml:mrow><mml:mi>&#x03B2;</mml:mi></mml:mrow><mml:mo>=</mml:mo><mml:mn>1.5</mml:mn><mml:mo>,</mml:mo><mml:mi>&#x03C6;</mml:mi><mml:mo>=</mml:mo><mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x0393;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mn>1</mml:mn><mml:mo>+</mml:mo><mml:mrow><mml:mi>&#x03B2;</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mi>sin</mml:mi><mml:mo>&#x2061;</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mi>&#x03C0;</mml:mi><mml:mrow><mml:mi>&#x03B2;</mml:mi></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:mfrac></mml:mstyle></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x0393;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mfrac><mml:mrow><mml:mn>1</mml:mn><mml:mo>+</mml:mo><mml:mrow><mml:mi>&#x03B2;</mml:mi></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:mfrac></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mrow><mml:mi>&#x03B2;</mml:mi></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:msup><mml:mn>2</mml:mn><mml:mrow><mml:mfrac><mml:mrow><mml:mrow><mml:mi>&#x03B2;</mml:mi></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mn>2</mml:mn></mml:mfrac></mml:mrow></mml:msup><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mfrac></mml:mstyle><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup></mml:math></disp-formula></p>
<p>By integrating <inline-formula id="ieqn-69"><mml:math id="mml-ieqn-69"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup></mml:math></inline-formula> and <inline-formula id="ieqn-70"><mml:math id="mml-ieqn-70"><mml:mi>L</mml:mi><mml:mi>e</mml:mi><mml:mi>v</mml:mi><mml:mi>y</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>&#x03BB;</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula>, the cuckoo search produces new solutions as follows:
<disp-formula id="eqn-19"><label>(19)</label><mml:math id="mml-eqn-19" display="block"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:msub><mml:mi>&#x03B1;</mml:mi><mml:mrow><mml:mn>0</mml:mn></mml:mrow></mml:msub><mml:mfrac><mml:mrow><mml:mi>&#x03D5;</mml:mi><mml:mi>&#x03BC;</mml:mi></mml:mrow><mml:mrow><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow><mml:mi>v</mml:mi><mml:msup><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mi>&#x03B2;</mml:mi></mml:mrow></mml:mfrac></mml:mrow></mml:msup></mml:mrow></mml:mfrac><mml:mrow><mml:mo>(</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula></p>
<p>The stochastic number <inline-formula id="ieqn-71"><mml:math id="mml-ieqn-71"><mml:mrow><mml:mtext>rand</mml:mtext></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x03F5;</mml:mi></mml:mrow><mml:mrow><mml:mo>[</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo>]</mml:mo></mml:mrow></mml:math></inline-formula> is compared to the finding probability after updating each position. The <inline-formula id="ieqn-72"><mml:math id="mml-ieqn-72"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mspace width="thickmathspace" /></mml:math></inline-formula> is modified if<inline-formula id="ieqn-73"><mml:math id="mml-ieqn-73"><mml:mspace width="thickmathspace" /><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mi>d</mml:mi><mml:mo>&#x003E;</mml:mo><mml:mspace width="thickmathspace" /><mml:mrow><mml:mtext mathvariant="italic">probability</mml:mtext></mml:mrow></mml:math></inline-formula>, otherwise it remains unchanged. To alter<inline-formula id="ieqn-74"><mml:math id="mml-ieqn-74"><mml:mspace width="thickmathspace" /><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup></mml:math></inline-formula>, the preferred stochastic walk is used to create the same number of new solutions. Mathematically, the stochastic walk desire is defined as follows:
<disp-formula id="eqn-20"><label>(20)</label><mml:math id="mml-eqn-20" display="block"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:mi>r</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>J</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>K</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula>where <inline-formula id="ieqn-75"><mml:math id="mml-ieqn-75"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>J</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup></mml:math></inline-formula> and <inline-formula id="ieqn-76"><mml:math id="mml-ieqn-76"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>K</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:msubsup></mml:math></inline-formula> are two stochastic outcomes of the <inline-formula id="ieqn-77"><mml:math id="mml-ieqn-77"><mml:msub><mml:mi>t</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mi>h</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> iteration and r is a homogeneous distributed stochastic result in the <inline-formula id="ieqn-78"><mml:math id="mml-ieqn-78"><mml:mrow><mml:mo>[</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo>]</mml:mo></mml:mrow><mml:mspace width="thickmathspace" /></mml:math></inline-formula> interval. After that, the direction of each cuckoo is updated with faster speed by using the following formulation.
<disp-formula id="eqn-21"><label>(21)</label><mml:math id="mml-eqn-21" display="block"><mml:mi>L</mml:mi><mml:mo>=</mml:mo><mml:mi>L</mml:mi><mml:mo>+</mml:mo><mml:mi>&#x03B1;</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mi>N</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:mrow><mml:mi mathvariant="normal">&#x03B1;</mml:mi></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mtext>L</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula>
<disp-formula id="eqn-22"><label>(22)</label><mml:math id="mml-eqn-22" display="block"><mml:mi>N</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:msubsup><mml:mrow><mml:mo>&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:msubsup><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo>,</mml:mo></mml:mrow></mml:msub><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mn>2</mml:mn><mml:mo>,</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mo>.</mml:mo><mml:mo>.</mml:mo><mml:mo>,</mml:mo></mml:mrow></mml:msub><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>D</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:mfrac></mml:math></disp-formula>
<disp-formula id="eqn-23"><label>(23)</label><mml:math id="mml-eqn-23" display="block"><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mi>N</mml:mi><mml:mi>E</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi>N</mml:mi><mml:mi>E</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>3</mml:mn><mml:mo>)</mml:mo></mml:mrow><mml:msqrt><mml:mfrac><mml:mi>b</mml:mi><mml:msup><mml:mi>B</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>x</mml:mi></mml:mrow></mml:msup></mml:mfrac></mml:msqrt></mml:math></disp-formula>where <italic>L</italic> represents the Levy distribution regulates step length, <inline-formula id="ieqn-79"><mml:math id="mml-ieqn-79"><mml:mi>N</mml:mi><mml:mi>E</mml:mi></mml:math></inline-formula> represents the total number of nests, and <italic>b</italic> and <inline-formula id="ieqn-80"><mml:math id="mml-ieqn-80"><mml:msup><mml:mi>B</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>x</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> are the present and total iterations. The additional parameter <inline-formula id="ieqn-81"><mml:math id="mml-ieqn-81"><mml:mi>N</mml:mi><mml:mi>e</mml:mi><mml:msub><mml:mi>s</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> is a prospective search space that will be modified constantly after each iteration, and <inline-formula id="ieqn-82"><mml:math id="mml-ieqn-82"><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> is the swarm hierarchy that will be improved with a downward trend after each iteration. 
At the end of the loop, <inline-formula id="ieqn-83"><mml:math id="mml-ieqn-83"><mml:msub><mml:mi>P</mml:mi><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> is assigned a value of 2. The parameter &#x03B1; is restricted in the range [0, 2] for enlarging the search space around the potential region according to the stochastic optimization algorithm. As a result, the alteration of step length <italic>L</italic> called <inline-formula id="ieqn-84"><mml:math id="mml-ieqn-84"><mml:msup><mml:mi>L</mml:mi><mml:mrow><mml:mrow><mml:mo>&#x2217;</mml:mo></mml:mrow></mml:mrow></mml:msup></mml:math></inline-formula> will be addressed first in any iteration.
<disp-formula id="eqn-24"><label>(24)</label><mml:math id="mml-eqn-24" display="block"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi>&#x03B1;</mml:mi><mml:mo>.</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msubsup><mml:mo>&#x2212;</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msubsup><mml:mo>)</mml:mo></mml:mrow><mml:mo>+</mml:mo><mml:mi>&#x03B7;</mml:mi><mml:mo>.</mml:mo><mml:mspace width="thickmathspace" /><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow><mml:msup><mml:mi>L</mml:mi><mml:mrow><mml:mrow><mml:mo>&#x2217;</mml:mo></mml:mrow></mml:mrow></mml:msup><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow></mml:math></disp-formula>
where &#x03B1; will be in the same range as [0, 2], <inline-formula id="ieqn-85"><mml:math id="mml-ieqn-85"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msubsup></mml:math></inline-formula> is the best approach for the <inline-formula id="ieqn-86"><mml:math id="mml-ieqn-86"><mml:msup><mml:mi>b</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mi>h</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> iteration, and <inline-formula id="ieqn-87"><mml:math id="mml-ieqn-87"><mml:mi>&#x03B7;</mml:mi></mml:math></inline-formula> is the orientation parameter specified in the preceding section. The orientation parameter is the one described in the preceding section and <inline-formula id="ieqn-88"><mml:math id="mml-ieqn-88"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi></mml:mrow></mml:msubsup></mml:math></inline-formula> was the best answer. In the later step, the Newton Raphson (NR) equation is opted to stop the number of iterations and obtain a fixed numeric value for the final solution. Mathematically, the NR is defined as follows:
<disp-formula id="eqn-25"><label>(25)</label><mml:math id="mml-eqn-25" display="block"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mfrac><mml:mrow><mml:mi>H</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:msup><mml:mi>H</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x2032;</mml:mi></mml:mrow></mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mfrac><mml:mo>,</mml:mo><mml:mspace width="thinmathspace" /><mml:mspace width="thinmathspace" /><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2208;</mml:mo><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup></mml:math></disp-formula>where, <inline-formula id="ieqn-89"><mml:math id="mml-ieqn-89"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:math></inline-formula> is NR resultant value and <inline-formula id="ieqn-90"><mml:math id="mml-ieqn-90"><mml:msup><mml:mi>H</mml:mi><mml:mrow><mml:mi mathvariant="normal">&#x2032;</mml:mi></mml:mrow></mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub><mml:mo>)</mml:mo></mml:mrow></mml:math></inline-formula> is first derivative value and <inline-formula id="ieqn-91"><mml:math id="mml-ieqn-91"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> represents <inline-formula id="ieqn-92"><mml:math 
id="mml-ieqn-92"><mml:msubsup><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow><mml:mrow><mml:mi>b</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msubsup></mml:math></inline-formula> initial feature values. Based on the resultant value <inline-formula id="ieqn-93"><mml:math id="mml-ieqn-93"><mml:msub><mml:mi>F</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:math></inline-formula>, the best selected feature vector is obtained having a dimension of <inline-formula id="ieqn-94"><mml:math id="mml-ieqn-94"><mml:mi>N</mml:mi><mml:mo>&#x00D7;</mml:mo><mml:mn>704</mml:mn></mml:math></inline-formula>. The selected feature vector is passed to multiclass SVM classifier for final classification results.</p>
</sec>
</sec>
<sec id="s4"><label>4</label><title>Experimental Results and Discussion</title>
<p>Two publicly available datasets named Plant village and Cucumber Leaf Images are utilized in this work for the experimental process. The 50&#x0025; of the images of each dataset are utilized for the training purpose, whereas the remaining 50&#x0025; of the images are employed for the testing purpose. After that, the 10-Fold cross validation is opted for the classification results. Ten different classifiers such as fine tree (FT), Medium tree (MT), Linear Discriminant (LD), Coarse tree, Medium Gaussian SVM (MG-SVM), Gaussian Naive Bayes (GN Bayes), Kernel Naive Bayes (KN Bayes), Linear support vector machine (SVM), Q-SVM, and Cubic SVM are employed for the classification comparison. Each classifier&#x0027;s performance is computed based on six performance measures such as sensitivity, precision, F1-Score, False-Positive rate (FPR), accuracy, and testing time per second. The entire proposed architecture is implemented on MATLAB 2021a using a Corei7 processor with 32GB of RAM and a 64-bit operating system. Moreover, an 8GB graphics card is also employed.</p>
<sec id="s4_1"><label>4.1</label><title>Results</title>
<p><bold>Potato Leaf Diseases Classification Results:</bold> The numerical results of potato leaf diseases recognition are presented in <xref ref-type="table" rid="table-2">Tab. 2</xref>. This table presents the best accuracy of 100&#x0025; for the GN Bayes classifier. The other calculated measures, such as the sensitivity rate, precision rate, and F1-Score, are 100&#x0025;, 100&#x0025;, and 100&#x0025;, respectively. <xref ref-type="fig" rid="fig-6">Fig. 6</xref> shows the confusion matrix of GN Bayes that can be utilized to verify the sensitivity rate. The computation time of the testing process is 9.68 (sec) for the GN Bayes classifier. The rest of the classifiers listed in this table also performed well and attained an accuracy of above 98&#x0025;.</p>
<table-wrap id="table-2"><label>Table 2</label><caption><title>Classification results of potato leaf diseases recognition using proposed architecture</title></caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">Sr.&#x0023;</th>
<th align="left">Classifier</th>
<th align="left">Sensitivity (&#x0025;)</th>
<th align="left">Precision (&#x0025;)</th>
<th align="left">F1-Score</th>
<th align="left">FPR</th>
<th align="left">Accuracy (&#x0025;)</th>
<th align="left">Time<break/>/sec</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left"><bold>1</bold></td>
<td align="left">FT</td>
<td align="left">97.8</td>
<td align="left">98.03</td>
<td align="left">97.91</td>
<td align="left">0.013</td>
<td align="left">98.0</td>
<td align="left">5.55</td>
</tr>
<tr>
<td align="left"><bold>2</bold></td>
<td align="left">Medium tree</td>
<td align="left">97.8</td>
<td align="left">99.03</td>
<td align="left">97.91</td>
<td align="left">0.013</td>
<td align="left">98.0</td>
<td align="left">5.83</td>
</tr>
<tr>
<td align="left"><bold>3</bold></td>
<td align="left">Coarse tree</td>
<td align="left">94.23</td>
<td align="left">94</td>
<td align="left">94.11</td>
<td align="left">0.343</td>
<td align="left">94.3</td>
<td align="left">5.60</td>
</tr>
<tr>
<td align="left"><bold>4</bold></td>
<td align="left">LDA</td>
<td align="left">99.9</td>
<td align="left">99.9</td>
<td align="left">99.9</td>
<td align="left">0</td>
<td align="left">99.9</td>
<td align="left">3.64</td>
</tr>
<tr>
<td align="left"><bold>5</bold></td>
<td align="left">MG-SVM</td>
<td align="left">99.93</td>
<td align="left">99.96</td>
<td align="left">99.94</td>
<td align="left">0</td>
<td align="left">99.9</td>
<td align="left">12.15</td>
</tr>
<tr>
<td align="left"><bold>6</bold></td>
<td align="left"><bold>GN Bayes</bold></td>
<td align="left">100</td>
<td align="left">100</td>
<td align="left">100</td>
<td align="left">0</td>
<td align="left"><bold>100</bold></td>
<td align="left">9.68</td>
</tr>
<tr>
<td align="left"><bold>7</bold></td>
<td align="left">KN Bayes</td>
<td align="left">99.86</td>
<td align="left">99.83</td>
<td align="left">99.84</td>
<td align="left">0</td>
<td align="left">99.8</td>
<td align="left">10.61</td>
</tr>
<tr>
<td align="left"><bold>8</bold></td>
<td align="left">Linear SVM</td>
<td align="left">99.96</td>
<td align="left">99.93</td>
<td align="left">99.94</td>
<td align="left">0</td>
<td align="left">99.9</td>
<td align="left">9.45</td>
</tr>
<tr>
<td align="left"><bold>9</bold></td>
<td align="left">Q-SVM</td>
<td align="left">99.9</td>
<td align="left">99.9</td>
<td align="left">99.9</td>
<td align="left">0</td>
<td align="left">99.9</td>
<td align="left">5.25</td>
</tr>
<tr>
<td align="left"><bold>10</bold></td>
<td align="left">Cubic SVM</td>
<td align="left">97.8</td>
<td align="left">98.03</td>
<td align="left">97.91</td>
<td align="left">0.013</td>
<td align="left">98.0</td>
<td align="left">7.55</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>To analyze the performance of the proposed framework, we also computed the results of all extracted deep features and compared them with the proposed framework in terms of accuracy and time. <xref ref-type="fig" rid="fig-7">Fig. 7</xref> shows the accuracy based comparison of the proposed framework and all extracted features of fine-tuned DarkNet19. This figure shows that the accuracy is improved by almost 2&#x0025;&#x2013;3&#x0025; after employing the proposed feature selection algorithm. <xref ref-type="fig" rid="fig-8">Fig. 8</xref> shows the testing time based comparison of the proposed framework with the original fine-tuned DarkNet19 extracted features. Based on this figure, it is observed that the time is significantly reduced after applying the feature selection algorithm.</p>
<fig id="fig-7"><label>Figure 7</label><caption><title>Comparison among all features and best selected features for Potato leaf diseases in terms of accuracy</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-7.png"/></fig>
<fig id="fig-8"><label>Figure 8</label><caption><title>Comparison among all features and best selected features for Potato leaf diseases in terms of testing time</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-8.png"/></fig>
<p><bold>Tomato Leaf Diseases Classification Results:</bold> Tomato leaf diseases recognition results are presented in <xref ref-type="table" rid="table-3">Tab. 3</xref>. In this table, the best attained accuracy is 92.9&#x0025; for the GN Bayes classifier. The sensitivity rate of GN Bayes is 94.49&#x0025;, whereas the precision rate and F1-Score values are 94.19&#x0025; and 94.33&#x0025;. <xref ref-type="fig" rid="fig-9">Fig. 9</xref> shows the confusion matrix of GN Bayes that can be utilized to verify the sensitivity rate. The computation time of GN Bayes during the testing process is 21.433 (sec). The minimum noted computational time for this experiment is 9.359 (sec). The rest of the classifiers listed in this table attained an average recognition accuracy of 90&#x0025;. The performance of the proposed framework is also compared with the original features extracted from the fine-tuned DarkNet19 CNN model in terms of accuracy and time. <xref ref-type="fig" rid="fig-10">Fig. 10</xref> shows the accuracy based comparison of the proposed framework and all extracted features of fine-tuned DarkNet19. This figure shows that the accuracy is improved by almost 3&#x0025;&#x2013;4&#x0025; after employing the proposed feature selection algorithm. <xref ref-type="fig" rid="fig-11">Fig. 11</xref> shows the testing time based comparison of the proposed framework with the original fine-tuned DarkNet19 extracted features. This figure shows that the testing time is significantly reduced after using the proposed optimization algorithm.</p>
<table-wrap id="table-3"><label>Table 3</label><caption><title>Classification results of tomato leaf diseases recognition using proposed architecture</title></caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">Sr.&#x0023;</th>
<th align="left">Classifier</th>
<th align="left">Sensitivity (&#x0025;)</th>
<th align="left">Precision (&#x0025;)</th>
<th align="left">F1-Score</th>
<th align="left">FPR</th>
<th align="left">Accuracy (&#x0025;)</th>
<th align="left">Time<break/>/sec</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left"><bold>1</bold></td>
<td align="left">FT</td>
<td align="left">85.8</td>
<td align="left">85.69</td>
<td align="left">85.74</td>
<td align="left">0.015</td>
<td align="left">87.4</td>
<td align="left">20.255</td>
</tr>
<tr>
<td align="left"><bold>2</bold></td>
<td align="left">Medium tree</td>
<td align="left">68.57</td>
<td align="left">69.25</td>
<td align="left">68.90</td>
<td align="left">0.026</td>
<td align="left">76.9</td>
<td align="left">13.091</td>
</tr>
<tr>
<td align="left"><bold>3</bold></td>
<td align="left">Coarse tree</td>
<td align="left">36.08</td>
<td align="left">25.43</td>
<td align="left">29.83</td>
<td align="left">0.053</td>
<td align="left">52.9</td>
<td align="left"><bold>9.359</bold></td>
</tr>
<tr>
<td align="left"><bold>4</bold></td>
<td align="left">LDA</td>
<td align="left">94.3</td>
<td align="left">94.29</td>
<td align="left">94.29</td>
<td align="left">0.007</td>
<td align="left">92.7</td>
<td align="left">17.146</td>
</tr>
<tr>
<td align="left"><bold>5</bold></td>
<td align="left">MG-SVM</td>
<td align="left">79.55</td>
<td align="left">84.44</td>
<td align="left">84.20</td>
<td align="left">0.008</td>
<td align="left">92.0</td>
<td align="left">58.245</td>
</tr>
<tr>
<td align="left"><bold>6</bold></td>
<td align="left"><bold>GN Bayes</bold></td>
<td align="left">94.49</td>
<td align="left">94.19</td>
<td align="left">94.33</td>
<td align="left">0.007</td>
<td align="left"><bold>92.9</bold></td>
<td align="left">21.433</td>
</tr>
<tr>
<td align="left"><bold>7</bold></td>
<td align="left">KN Bayes</td>
<td align="left">92.76</td>
<td align="left">92.37</td>
<td align="left">92.56</td>
<td align="left">0.008</td>
<td align="left">91.7</td>
<td align="left">30.53</td>
</tr>
<tr>
<td align="left"><bold>8</bold></td>
<td align="left">Linear SVM</td>
<td align="left">90.58</td>
<td align="left">90.64</td>
<td align="left">90.60</td>
<td align="left">0.009</td>
<td align="left">91.9</td>
<td align="left">38.165</td>
</tr>
<tr>
<td align="left"><bold>9</bold></td>
<td align="left">Q-SVM</td>
<td align="left">89.32</td>
<td align="left">89.31</td>
<td align="left">89.31</td>
<td align="left">0.01</td>
<td align="left">90.7</td>
<td align="left">45.869</td>
</tr>
<tr>
<td align="left"><bold>10</bold></td>
<td align="left">Cubic SVM</td>
<td align="left">89.14</td>
<td align="left">89.13</td>
<td align="left">89.13</td>
<td align="left">0.01</td>
<td align="left">90.4</td>
<td align="left">43.062</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="fig-9"><label>Figure 9</label><caption><title>Confusion matrix of GN Bayes classifier for Tomato leaf diseases recognition using proposed architecture</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-9.png"/></fig>
<fig id="fig-10"><label>Figure 10</label><caption><title>Comparison among all features and best selected features for Tomato leaf diseases in terms of accuracy</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-10.png"/></fig>
<fig id="fig-11"><label>Figure 11</label><caption><title>Comparison among all features and best selected features for Tomato leaf diseases in terms of testing time</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-11.png"/></fig>
<p><bold>Cucumber Leaf Diseases Classification Results:</bold> <xref ref-type="table" rid="table-4">Tab. 4</xref> presents the results of the proposed framework for cucumber leaf diseases recognition. This table shows the best attained accuracy of 99.2&#x0025; for the LSVM classifier. The other calculated measures of the LSVM classifier are a sensitivity rate of 99.14&#x0025;, a precision rate of 99.16&#x0025;, and an F1-Score of 99.14&#x0025;. The sensitivity rate of LSVM is verified by a confusion matrix, illustrated in <xref ref-type="fig" rid="fig-12">Fig. 12</xref>. In this figure, the diagonal values represent the correct prediction rate of each class. The computation time of LSVM is 6.861 (sec), whereas the minimum noted time is 2.045 (sec) of the Coarse tree classifier. The average classification accuracy for the rest of the classifiers is above 95&#x0025; except Coarse Tree.</p>
<table-wrap id="table-4"><label>Table 4</label><caption><title>Classification results of cucumber leaf diseases recognition using proposed architecture</title></caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">Sr.&#x0023;</th>
<th align="left">Classifier</th>
<th align="left">Sensitivity (&#x0025;)</th>
<th align="left">Precision (&#x0025;)</th>
<th align="left">F1-Score</th>
<th align="left">FPR</th>
<th align="left">Accuracy (&#x0025;)</th>
<th align="left">Time<break/>/sec</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left"><bold>1</bold></td>
<td align="left">FT</td>
<td align="left">94.86</td>
<td align="left">94.86</td>
<td align="left">94.86</td>
<td align="left">0.014</td>
<td align="left">95.0</td>
<td align="left">5.070</td>
</tr>
<tr>
<td align="left"><bold>2</bold></td>
<td align="left">Medium tree</td>
<td align="left">85.9</td>
<td align="left">86.06</td>
<td align="left">85.97</td>
<td align="left">0.036</td>
<td align="left">86.3</td>
<td align="left">2.567</td>
</tr>
<tr>
<td align="left"><bold>3</bold></td>
<td align="left">Coarse tree</td>
<td align="left">68.22</td>
<td align="left">77.68</td>
<td align="left">72.64</td>
<td align="left">0.08</td>
<td align="left">68.2</td>
<td align="left">2.045</td>
</tr>
<tr>
<td align="left"><bold>4</bold></td>
<td align="left">LDA</td>
<td align="left">98.92</td>
<td align="left">98.94</td>
<td align="left">98.92</td>
<td align="left">0.002</td>
<td align="left">99.0</td>
<td align="left">3.446</td>
</tr>
<tr>
<td align="left"><bold>5</bold></td>
<td align="left">MG-SVM</td>
<td align="left">99.1</td>
<td align="left">99.12</td>
<td align="left">99.10</td>
<td align="left">0.002</td>
<td align="left">99.2</td>
<td align="left">7.341</td>
</tr>
<tr>
<td align="left"><bold>6</bold></td>
<td align="left">GN Bayes</td>
<td align="left">98.98</td>
<td align="left">99</td>
<td align="left">98.98</td>
<td align="left">0.002</td>
<td align="left">99</td>
<td align="left">6.886</td>
</tr>
<tr>
<td align="left"><bold>7</bold></td>
<td align="left">KN Bayes</td>
<td align="left">98.18</td>
<td align="left">98.26</td>
<td align="left">98.21</td>
<td align="left">0.004</td>
<td align="left">98.3</td>
<td align="left">14.99</td>
</tr>
<tr>
<td align="left"><bold>8</bold></td>
<td align="left"><bold>Linear SVM</bold></td>
<td align="left">99.14</td>
<td align="left">99.16</td>
<td align="left">99.14</td>
<td align="left">0.002</td>
<td align="left"><bold>99.2</bold></td>
<td align="left">6.861</td>
</tr>
<tr>
<td align="left"><bold>9</bold></td>
<td align="left">Q-SVM</td>
<td align="left">99.02</td>
<td align="left">99.04</td>
<td align="left">99.02</td>
<td align="left">0.002</td>
<td align="left">99.1</td>
<td align="left">6.149</td>
</tr>
<tr>
<td align="left"><bold>10</bold></td>
<td align="left">Cubic SVM</td>
<td align="left">98.88</td>
<td align="left">98.9</td>
<td align="left">98.88</td>
<td align="left">0.002</td>
<td align="left">99.0</td>
<td align="left">6.531</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="fig-12"><label>Figure 12</label><caption><title>Confusion matrix of Linear SVM classifier for Cucumber leaf diseases recognition using proposed architecture</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-12.png"/></fig>
<p>The performance of the proposed framework is also analyzed based on the accuracy computed for all extracted deep features of fine-tuned DarkNet19 in terms of accuracy and time. <xref ref-type="fig" rid="fig-13">Fig. 13</xref> shows the accuracy based comparison of the proposed framework and all extracted features of the fine-tuned DarkNet19 model. This figure shows that the accuracy is improved by almost 4&#x0025;&#x2013;5&#x0025; for the proposed features optimization algorithm. <xref ref-type="fig" rid="fig-14">Fig. 14</xref> shows the testing time based comparison of the proposed framework with the original fine-tuned DarkNet19 extracted features. This figure represents that the computational time of the proposed framework (best features) is better than that of the originally extracted deep features of DarkNet19.</p>
<fig id="fig-13"><label>Figure 13</label><caption><title>Comparison among all features and best selected features for Cucumber leaf diseases in terms of accuracy</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-13.png"/></fig>
<fig id="fig-14"><label>Figure 14</label><caption><title>Comparison among all features and best selected features for Cucumber leaf diseases in terms of testing time</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-14.png"/></fig>
</sec>
<sec id="s4_2"><label>4.2</label><title>Discussion</title>
<p>At the end, the proposed method is discussed to show the importance of each step. <xref ref-type="fig" rid="fig-3">Fig. 3</xref> illustrates the proposed framework, which is based on contrast enhancement and data augmentation. Contrast enhancement visual results are illustrated in <xref ref-type="fig" rid="fig-15">Fig. 15</xref>, which shows the improvement after the proposed enhancement technique. The resultant enhanced images are utilized for the training of the fine-tuned DarkNet19 model, which is later exploited for deep feature extraction. After that, the proposed optimization algorithm is applied, and the results are given in the above tables and plots. The results show that the proposed framework gives better results.</p>
<fig id="fig-15"><label>Figure 15</label><caption><title>Visual results of contrast enhancement technique</title></caption><graphic mimetype="image" mime-subtype="png" xlink:href="CMC_28824-fig-15.png"/></fig>
<p>Finally, the accuracy of the proposed method is compared with recent state-of-the-art (SOTA) techniques, as presented in <xref ref-type="table" rid="table-5">Tab. 5</xref>. In this table, it is noted that the recently published techniques attained a maximum accuracy of 91.67&#x0025; for potato leaf diseases, 87.11&#x0025; for tomato, and 98.4&#x0025; for cucumber. The proposed method achieved an improved accuracy of 100&#x0025;, 92.9&#x0025;, and 99.2&#x0025;, respectively, for each crop.</p>
<table-wrap id="table-5"><label>Table 5</label><caption><title>Comparison of proposed method accuracy with SOTA techniques</title></caption>
<table frame="hsides">
<colgroup>
<col align="left"/>
<col align="left"/>
<col align="left"/>
<col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">References</th>
<th align="left">Crop</th>
<th align="left">Dataset</th>
<th align="left">Accuracy (&#x0025;)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">[<xref ref-type="bibr" rid="ref-2">2</xref>], 2021</td>
<td align="left">Potato</td>
<td align="left">Plant village (3 Classes)</td>
<td align="left">91.67</td>
</tr>
<tr>
<td align="left">[<xref ref-type="bibr" rid="ref-2">2</xref>], 2021</td>
<td align="left">Tomato</td>
<td align="left">Plant village (10 Classes)</td>
<td align="left">87.11</td>
</tr>
<tr>
<td align="left">[<xref ref-type="bibr" rid="ref-27">27</xref>], 2022</td>
<td align="left">Cucumber</td>
<td align="left">Cucumber nation dataset (China)</td>
<td align="left">96.5</td>
</tr>
<tr>
<td align="left">[<xref ref-type="bibr" rid="ref-28">28</xref>], 2022</td>
<td align="left">Cucumber</td>
<td align="left">Cucumber nation dataset (China)</td>
<td align="left">98.4</td>
</tr>
<tr>
<td align="left" rowspan="3">Proposed</td>
<td align="left">Potato</td>
<td align="left">Plant village (3 Classes)</td>
<td align="left"><bold>100</bold></td>
</tr>
<tr>
<td align="left">Tomato</td>
<td align="left">Plant village (10 Classes)</td>
<td align="left"><bold>92.9</bold></td>
</tr>
<tr>
<td align="left">Cucumber</td>
<td align="left">Cucumber nation dataset (China)</td>
<td align="left"><bold>99.2</bold></td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
</sec>
<sec id="s5"><label>5</label><title>Conclusion</title>
<p>In this article, we proposed a deep learning and optimization based sequential architecture for crop leaf disease recognition. The developed architecture consists of five sequential steps. In the first step, data augmentation is performed based on contrast enhancement and rotation operations to increase the number of training samples. In the next step, a pre-trained DarkNet19 deep model is selected and fine-tuned based on the last layers. In the third step, the fine-tuned model is trained using transfer learning and deep features are extracted from the global pooling layer. In the fourth step, the best features are selected using the improved Cuckoo search algorithm and are finally classified in the fifth step using machine learning classifiers. The experimental process of the proposed series architecture is conducted on publicly available datasets and shows improved accuracy. Based on the results, we conclude that the data augmentation step improves the learning capability of a CNN model. The main reason behind the selection of DarkNet19 is its smaller number of parameters compared with several other pre-trained models. However, during the analysis, it is noted that a few irrelevant features are also extracted. Therefore, the improved optimization algorithm is developed. In the future, the following points shall be considered: i) An advanced CNN model shall be adopted for deep feature extraction [<xref ref-type="bibr" rid="ref-29">29</xref>,<xref ref-type="bibr" rid="ref-30">30</xref>]; ii) The model shall be trained on noisy and clean images to check the capability of the designed CNN architecture [<xref ref-type="bibr" rid="ref-31">31</xref>&#x2013;<xref ref-type="bibr" rid="ref-33">33</xref>]; and iii) More datasets will be utilized for the experimental process, and reinforcement learning techniques will be considered.</p>
</sec>
</body>
<back>
<ack>
<p>The authors are grateful to the ERAWAN project for high-performance computers. This research work was partially supported by Chiang Mai University and HITEC.</p>
</ack>
<fn-group>
<fn fn-type="other"><p><bold>Funding Statement:</bold> The authors received no specific funding for this study.</p></fn>
<fn fn-type="conflict"><p><bold>Conflicts of Interest:</bold> The authors declare that they have no conflicts of interest to report regarding the present study.</p></fn>
</fn-group>
<ref-list content-type="authoryear">
<title>References</title>
<ref id="ref-1"><label>[1]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Rehman</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Jingdong</surname></string-name>, <string-name><given-names>B.</given-names> <surname>Shahzad</surname></string-name>, <string-name><given-names>A. A.</given-names> <surname>Chandio</surname></string-name> and <string-name><given-names>I.</given-names> <surname>Hussain</surname></string-name></person-group>, &#x201C;<article-title>Economic perspectives of major field crops of Pakistan: An empirical study</article-title>,&#x201D; <source>Pacific Science Review B: Humanities and Social Sciences</source>, vol. <volume>1</volume>, no. <issue>2</issue>, pp. <fpage>145</fpage>&#x2013;<lpage>158</lpage>, <year>2015</year>.</mixed-citation></ref>
<ref id="ref-2"><label>[2]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>F.</given-names> <surname>Saeed</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Sharif</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Mittal</surname></string-name>, <string-name><given-names>L. M.</given-names> <surname>Goyal</surname></string-name> and <string-name><given-names>S.</given-names> <surname>Roy</surname></string-name></person-group>, &#x201C;<article-title>Deep neural network features fusion and selection based on PLS regression with an application for crops diseases classification</article-title>,&#x201D; <source>Applied Soft Computing</source>, vol. <volume>103</volume>, no. <issue>11</issue>, pp. <fpage>107164</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-3"><label>[3]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Sladojevic</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Arsenovic</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Anderla</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Culibrk</surname></string-name> and <string-name><given-names>D.</given-names> <surname>Stefanovic</surname></string-name></person-group>, &#x201C;<article-title>Deep neural networks based recognition of plant diseases by leaf image classification</article-title>,&#x201D; <source>Computational Intelligence and Neuroscience</source>, vol. <volume>16</volume>, no. <issue>5</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>21</lpage>, <year>2016</year>.</mixed-citation></ref>
<ref id="ref-4"><label>[4]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J. M.</given-names> <surname>Cohen</surname></string-name>, <string-name><given-names>D. J.</given-names> <surname>Civitello</surname></string-name>, <string-name><given-names>M. D.</given-names> <surname>Venesky</surname></string-name>, <string-name><given-names>T. A.</given-names> <surname>McMahon</surname></string-name> and <string-name><given-names>J. R.</given-names> <surname>Rohr</surname></string-name></person-group>, &#x201C;<article-title>An interaction between climate change and infectious disease drove widespread amphibian declines</article-title>,&#x201D; <source>Global Change Biology</source>, vol. <volume>25</volume>, no. <issue>13</issue>, pp. <fpage>927</fpage>&#x2013;<lpage>937</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-5"><label>[5]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Kamilaris</surname></string-name> and <string-name><given-names>F. X.</given-names> <surname>Prenafeta-Bold&#x00FA;</surname></string-name></person-group>, &#x201C;<article-title>Deep learning in agriculture: A survey</article-title>,&#x201D; <source>Computers and Electronics in Agriculture</source>, vol. <volume>147</volume>, no. <issue>17</issue>, pp. <fpage>70</fpage>&#x2013;<lpage>90</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-6"><label>[6]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Zheng</surname></string-name>, <string-name><given-names>Q.</given-names> <surname>Zhu</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Huang</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Guo</surname></string-name> and <string-name><given-names>J.</given-names> <surname>Qin</surname></string-name></person-group>, &#x201C;<article-title>Maize and weed classification using color indices with support vector data description in outdoor fields</article-title>,&#x201D; <source>Computers and Electronics in Agriculture</source>, vol. <volume>141</volume>, no. <issue>26</issue>, pp. <fpage>215</fpage>&#x2013;<lpage>222</lpage>, <year>2017</year>.</mixed-citation></ref>
<ref id="ref-7"><label>[7]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>G.</given-names> <surname>Bhadane</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Sharma</surname></string-name> and <string-name><given-names>V. B.</given-names> <surname>Nerkar</surname></string-name></person-group>, &#x201C;<article-title>Early pest identification in agricultural crops using image processing techniques</article-title>,&#x201D; <source>International Journal of Electrical, Electronics and Computer Engineering</source>, vol. <volume>2</volume>, no. <issue>7</issue>, pp. <fpage>77</fpage>&#x2013;<lpage>82</lpage>, <year>2013</year>.</mixed-citation></ref>
<ref id="ref-8"><label>[8]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Adeel</surname></string-name>, <string-name><given-names>M. A.</given-names> <surname>Khan</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Akram</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Sharif</surname></string-name> and <string-name><given-names>T.</given-names> <surname>Saba</surname></string-name> <etal>et al.,</etal></person-group> &#x201C;<article-title>Entropy-controlled deep features selection framework for grape leaf diseases recognition</article-title>,&#x201D; <source>Expert Systems</source>, vol. <volume>11</volume>, no. <issue>3</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>23</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-9"><label>[9]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>V.</given-names> <surname>Singh</surname></string-name> and <string-name><given-names>A. K.</given-names> <surname>Misra</surname></string-name></person-group>, &#x201C;<article-title>Detection of plant leaf diseases using image segmentation and soft computing techniques</article-title>,&#x201D; <source>Information Processing in Agriculture</source>, vol. <volume>4</volume>, no. <issue>4</issue>, pp. <fpage>41</fpage>&#x2013;<lpage>49</lpage>, <year>2017</year>.</mixed-citation></ref>
<ref id="ref-10"><label>[10]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Abbaspour</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Aghabara</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Davari</surname></string-name> and <string-name><given-names>J. M.</given-names> <surname>Maja</surname></string-name></person-group>, &#x201C;<article-title>Feasibility of using computer vision and artificial intelligence techniques in detection of some apple pests and diseases</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>12</volume>, no. <issue>11</issue>, pp. <fpage>906</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-11"><label>[11]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. M.</given-names> <surname>Mostafa</surname></string-name>, <string-name><given-names>S. A.</given-names> <surname>Kumar</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Meraj</surname></string-name>, <string-name><given-names>H. T.</given-names> <surname>Rauf</surname></string-name> and <string-name><given-names>A. A.</given-names> <surname>Alnuaim</surname></string-name></person-group>, &#x201C;<article-title>Guava disease detection using deep convolutional neural networks: A case study of guava plants</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>12</volume>, no. <issue>26</issue>, pp. <fpage>239</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-12"><label>[12]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>R.</given-names> <surname>Saleem</surname></string-name>, <string-name><given-names>J. H.</given-names> <surname>Shah</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Sharif</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Yasmin</surname></string-name> and <string-name><given-names>J.</given-names> <surname>Cha</surname></string-name></person-group>, &#x201C;<article-title>Mango leaf disease recognition and classification using novel segmentation and vein pattern technique</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>11</volume>, no. <issue>4</issue>, pp. <fpage>11901</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-13"><label>[13]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Sun</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Liu</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Zhou</surname></string-name> and <string-name><given-names>H.</given-names> <surname>Hu</surname></string-name></person-group>, &#x201C;<article-title>Plant diseases identification through a discount momentum optimizer in deep learning</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>11</volume>, no. <issue>21</issue>, pp. <fpage>9468</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-14"><label>[14]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Z.</given-names> <surname>Iqbal</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Sharif</surname></string-name>, <string-name><given-names>J. H.</given-names> <surname>Shah</surname></string-name>, <string-name><given-names>M. H.</given-names> <surname>ur Rehman</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Javed</surname></string-name></person-group>, &#x201C;<article-title>An automated detection and classification of citrus plant diseases using image processing techniques: A review</article-title>,&#x201D; <source>Computers and Electronics in Agriculture</source>, vol. <volume>153</volume>, no. <issue>5</issue>, pp. <fpage>12</fpage>&#x2013;<lpage>32</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-15"><label>[15]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>I. D.</given-names> <surname>Raji</surname></string-name>, <string-name><given-names>H.</given-names> <surname>BelloSalau</surname></string-name>, <string-name><given-names>I. J.</given-names> <surname>Umoh</surname></string-name>, <string-name><given-names>A. J.</given-names> <surname>Onumanyi</surname></string-name> and <string-name><given-names>A. T.</given-names> <surname>Salawudeen</surname></string-name></person-group>, &#x201C;<article-title>Simple deterministic selection-based genetic algorithm for hyperparameter tuning of machine learning models</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>12</volume>, no. <issue>26</issue>, pp. <fpage>1186</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-16"><label>[16]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>D.</given-names> <surname>Bender</surname></string-name>, <string-name><given-names>D. J.</given-names> <surname>Licht</surname></string-name> and <string-name><given-names>C.</given-names> <surname>Nataraj</surname></string-name></person-group>, &#x201C;<article-title>A novel embedded feature selection and dimensionality reduction method for an SVM type classifier to predict periventricular leukomalacia (PVL) in neonates</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>11</volume>, no. <issue>2</issue>, pp. <fpage>11156</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-17"><label>[17]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Ma</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Du</surname></string-name>, <string-name><given-names>F.</given-names> <surname>Zheng</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Zhang</surname></string-name> and <string-name><given-names>Z.</given-names> <surname>Sun</surname></string-name></person-group>, &#x201C;<article-title>A recognition method for cucumber diseases using leaf symptom images based on deep convolutional neural network</article-title>,&#x201D; <source>Computers and Electronics in Agriculture</source>, vol. <volume>154</volume>, no. <issue>4</issue>, pp. <fpage>18</fpage>&#x2013;<lpage>24</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-18"><label>[18]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M. K.</given-names> <surname>Singh</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Chetia</surname></string-name> and <string-name><given-names>M.</given-names> <surname>Singh</surname></string-name></person-group>, &#x201C;<article-title>Detection and classification of plant leaf diseases in image processing using MATLAB</article-title>,&#x201D; <source>International Journal of Life Sciences Research</source>, vol. <volume>5</volume>, no. <issue>6</issue>, pp. <fpage>120</fpage>&#x2013;<lpage>124</lpage>, <year>2017</year>.</mixed-citation></ref>
<ref id="ref-19"><label>[19]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>V.</given-names> <surname>Sravan</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Swaraj</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Meenakshi</surname></string-name> and <string-name><given-names>P.</given-names> <surname>Kora</surname></string-name></person-group>, &#x201C;<article-title>A deep learning based crop disease classification using transfer learning</article-title>,&#x201D; <source>Materials Today: Proceedings</source>, vol. <volume>4</volume>, no. <issue>1</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>10</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-20"><label>[20]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K. K.</given-names> <surname>Chakraborty</surname></string-name>, <string-name><given-names>R.</given-names> <surname>Mukherjee</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Chakroborty</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Bora</surname></string-name></person-group>, &#x201C;<article-title>Automated recognition of optical image based potato leaf blight diseases using deep learning</article-title>,&#x201D; <source>Physiological and Molecular Plant Pathology</source>, vol. <volume>117</volume>, no. <issue>4</issue>, pp. <fpage>101781</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-21"><label>[21]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N. E. M.</given-names> <surname>Khalifa</surname></string-name>, <string-name><given-names>M. H. N.</given-names> <surname>Taha</surname></string-name>, <string-name><given-names>A.</given-names> <surname>ElMaged</surname></string-name> and <string-name><given-names>A. E.</given-names> <surname>Hassanien</surname></string-name></person-group>, &#x201C;<article-title>Artificial intelligence in potato leaf disease classification: A deep learning approach</article-title>,&#x201D; <source>Machine Learning and Big Data Analytics Paradigms</source>, vol. <volume>5</volume>, no. <issue>12</issue>, pp. <fpage>63</fpage>&#x2013;<lpage>79</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-22"><label>[22]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Rashid</surname></string-name>, <string-name><given-names>I.</given-names> <surname>Khan</surname></string-name>, <string-name><given-names>G.</given-names> <surname>Ali</surname></string-name>, <string-name><given-names>S. H.</given-names> <surname>Almotiri</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Masood</surname></string-name></person-group>, &#x201C;<article-title>Multi-level deep learning model for potato leaf disease recognition</article-title>,&#x201D; <source>Electronics</source>, vol. <volume>10</volume>, no. <issue>2</issue>, pp. <fpage>2064</fpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-23"><label>[23]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>R.</given-names> <surname>Thangaraj</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Anandamurugan</surname></string-name> and <string-name><given-names>V. K.</given-names> <surname>Kaliappan</surname></string-name></person-group>, &#x201C;<article-title>Automated tomato leaf disease classification using transfer learning-based deep convolution neural network</article-title>,&#x201D; <source>Journal of Plant Diseases and Protection</source>, vol. <volume>128</volume>, no. <issue>8</issue>, pp. <fpage>73</fpage>&#x2013;<lpage>86</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-24"><label>[24]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>C.</given-names> <surname>Zhang</surname></string-name>, <string-name><given-names>X.</given-names> <surname>Wang</surname></string-name> and <string-name><given-names>Y.</given-names> <surname>Shi</surname></string-name></person-group>, &#x201C;<article-title>Cucumber leaf disease identification with global pooling dilated convolutional neural network</article-title>,&#x201D; <source>Computers and Electronics in Agriculture</source>, vol. <volume>162</volume>, no. <issue>13</issue>, pp. <fpage>422</fpage>&#x2013;<lpage>430</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-25"><label>[25]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S. P.</given-names> <surname>Mohanty</surname></string-name>, <string-name><given-names>D. P.</given-names> <surname>Hughes</surname></string-name> and <string-name><given-names>M.</given-names> <surname>Salath&#x00E9;</surname></string-name></person-group>, &#x201C;<article-title>Using deep learning for image-based plant disease detection</article-title>,&#x201D; <source>Frontiers in Plant Science</source>, vol. <volume>7</volume>, no. <issue>2</issue>, pp. <fpage>1419</fpage>, <year>2016</year>.</mixed-citation></ref>
<ref id="ref-26"><label>[26]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. H.</given-names> <surname>Gandomi</surname></string-name>, <string-name><given-names>X.-S.</given-names> <surname>Yang</surname></string-name> and <string-name><given-names>A. H.</given-names> <surname>Alavi</surname></string-name></person-group>, &#x201C;<article-title>Cuckoo search algorithm: A metaheuristic approach to solve structural optimization problems</article-title>,&#x201D; <source>Engineering with Computers</source>, vol. <volume>29</volume>, no. <issue>4</issue>, pp. <fpage>17</fpage>&#x2013;<lpage>35</lpage>, <year>2013</year>.</mixed-citation></ref>
<ref id="ref-27"><label>[27]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N.</given-names> <surname>Hussain</surname></string-name>, <string-name><given-names>U.</given-names> <surname>Tariq</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Kadry</surname></string-name>, <string-name><given-names>M. A. E.</given-names> <surname>Yar</surname></string-name> and <string-name><given-names>A. M.</given-names> <surname>Mostafa</surname></string-name></person-group>, &#x201C;<article-title>Multiclass cucumber leaf diseases recognition using best feature selection</article-title>,&#x201D; <source>Computers, Material and Continua</source>, vol. <volume>70</volume>, no. <issue>1</issue>, pp. <fpage>3281</fpage>&#x2013;<lpage>3294</lpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-28"><label>[28]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M. A.</given-names> <surname>Khan</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Alqahtani</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Khan</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Alsubai</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Binbusayyis</surname></string-name></person-group>, &#x201C;<article-title>Cucumber leaf diseases recognition using multi level deep entropy-elm feature selection</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>12</volume>, no. <issue>14</issue>, pp. <fpage>593</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-29"><label>[29]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Aqeel</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Hassan</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Rehman</surname></string-name>, <string-name><given-names>U.</given-names> <surname>Tariq</surname></string-name> and <string-name><given-names>S.</given-names> <surname>Kadry</surname></string-name></person-group>, &#x201C;<article-title>A long short-term memory biomarker-based prediction framework for Alzheimer&#x2019;s disease</article-title>,&#x201D; <source>Sensors</source>, vol. <volume>22</volume>, no. <issue>7</issue>, pp. <fpage>1475</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-30"><label>[30]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>F.</given-names> <surname>Afza</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Sharif</surname></string-name>, <string-name><given-names>U.</given-names> <surname>Tariq</surname></string-name>, <string-name><given-names>H. S.</given-names> <surname>Yong</surname></string-name> and <string-name><given-names>J.</given-names> <surname>Cha</surname></string-name></person-group>, &#x201C;<article-title>Multiclass skin lesion classification using hybrid deep features selection and extreme learning machine</article-title>,&#x201D; <source>Sensors</source>, vol. <volume>22</volume>, no. <issue>3</issue>, pp. <fpage>799</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-31"><label>[31]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Jabeen</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Alhaisoni</surname></string-name>, <string-name><given-names>U.</given-names> <surname>Tariq</surname></string-name>, <string-name><given-names>Y. D.</given-names> <surname>Zhang</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Hamza</surname></string-name></person-group>, &#x201C;<article-title>Breast cancer classification from ultrasound images using probability-based optimal deep learning feature fusion</article-title>,&#x201D; <source>Sensors</source>, vol. <volume>22</volume>, no. <issue>11</issue>, pp. <fpage>807</fpage>, <year>2022</year>.</mixed-citation></ref>
<ref id="ref-32"><label>[32]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Muhammad</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Sharif</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Akram</surname></string-name> and <string-name><given-names>S.</given-names> <surname>Kadry</surname></string-name></person-group>, &#x201C;<article-title>Intelligent fusion-assisted skin lesion localization and classification for smart healthcare</article-title>,&#x201D; <source>Neural Computing and Applications</source>, vol. <volume>4</volume>, no. <issue>2</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>16</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-33"><label>[33]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Sharif</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Akram</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Kadry</surname></string-name> and <string-name><given-names>C. H.</given-names> <surname>Hsu</surname></string-name></person-group>, &#x201C;<article-title>A two-stream deep neural network-based intelligent system for complex skin cancer types classification</article-title>,&#x201D; <source>International Journal of Intelligent Systems</source>, vol. <volume>1</volume>, no. <issue>3</issue>, pp. <fpage>1</fpage>&#x2013;<lpage>23</lpage>, <year>2021</year>.</mixed-citation></ref>
</ref-list>
</back>
</article>