<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "http://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="1.1">
<front>
<journal-meta>
<journal-id journal-id-type="pmc">IASC</journal-id>
<journal-id journal-id-type="nlm-ta">IASC</journal-id>
<journal-id journal-id-type="publisher-id">IASC</journal-id>
<journal-title-group>
<journal-title>Intelligent Automation &#x0026; Soft Computing</journal-title>
</journal-title-group>
<issn pub-type="epub">2326-005X</issn><issn pub-type="ppub">1079-8587</issn>
<publisher>
<publisher-name>Tech Science Press</publisher-name>
<publisher-loc>USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">18039</article-id>
<article-id pub-id-type="doi">10.32604/iasc.2021.018039</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Article</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Machine Learning-based Detection and Classification of Walnut Fungi Diseases</article-title>
<alt-title alt-title-type="left-running-head">Machine Learning-based Detection and Classification of Walnut Fungi Diseases</alt-title>
<alt-title alt-title-type="right-running-head">Machine Learning-based Detection and Classification of Walnut Fungi Diseases</alt-title>
</title-group>
<contrib-group content-type="authors">
<contrib id="author-1" contrib-type="author">
<name name-style="western">
<surname>Khan</surname>
<given-names>Muhammad Alyas</given-names>
</name>
<xref ref-type="aff" rid="aff-1">1</xref>
</contrib>
<contrib id="author-2" contrib-type="author">
<name name-style="western">
<surname>Ali</surname>
<given-names>Mushtaq</given-names>
</name>
<xref ref-type="aff" rid="aff-1">1</xref>
</contrib>
<contrib id="author-3" contrib-type="author">
<name name-style="western">
<surname>Shah</surname>
<given-names>Mohsin</given-names>
</name>
<xref ref-type="aff" rid="aff-2">2</xref>
</contrib>
<contrib id="author-4" contrib-type="author">
<name name-style="western">
<surname>Mahmood</surname>
<given-names>Toqeer</given-names>
</name>
<xref ref-type="aff" rid="aff-3">3</xref>
</contrib>
<contrib id="author-5" contrib-type="author">
<name name-style="western">
<surname>Ahmad</surname>
<given-names>Muneer</given-names>
</name>
<xref ref-type="aff" rid="aff-4">4</xref>
</contrib>
<contrib id="author-6" contrib-type="author">
<name name-style="western">
<surname>Jhanjhi</surname>
<given-names>NZ</given-names>
</name>
<xref ref-type="aff" rid="aff-5">5</xref>
</contrib>
<contrib id="author-7" contrib-type="author" corresp="yes">
<name name-style="western">
<surname>Bhuiyan</surname>
<given-names>Mohammad Arif Sobhan</given-names>
</name>
<xref ref-type="aff" rid="aff-6">6</xref>
<email>arifsobhan.bhuiyan@xmu.edu.my</email>
</contrib>
<contrib id="author-8" contrib-type="author">
<name name-style="western">
<surname>Jaha</surname>
<given-names>Emad Sami</given-names>
</name>
<xref ref-type="aff" rid="aff-7">7</xref>
</contrib>
<aff id="aff-1">
<label>1</label><institution>Department of Information Technology, Hazara University</institution>, <addr-line>Mansehra</addr-line>, <country>Pakistan</country></aff>
<aff id="aff-2">
<label>2</label><institution>Department of Telecommunication, Hazara University</institution>, <addr-line>Mansehra</addr-line>, <country>Pakistan</country></aff>
<aff id="aff-3">
<label>3</label><institution>Department of Computer Science, National Textile University</institution>, <addr-line>Faisalabad</addr-line>, <country>Pakistan</country></aff>
<aff id="aff-4">
<label>4</label><institution>Department of Information Systems, Faculty of Computer Science &#x0026; Information Technology, Universiti Malaya</institution>, <addr-line>50603, Kuala Lumpur</addr-line>, <country>Malaysia</country></aff>
<aff id="aff-5">
<label>5</label><institution>School of Computer Science and Engineering, SCE, Taylor&#x2019;s University</institution>, <addr-line>Subang Jaya</addr-line>, <country>Malaysia</country></aff>
<aff id="aff-6">
<label>6</label><institution>Electrical and Electronics Engineering, Xiamen University Malaysia</institution>, <addr-line>Bandar Sunsuria, 43900 Sepang, Selangor</addr-line>, <country>Malaysia</country></aff>
<aff id="aff-7">
<label>7</label><institution>Department of Computer Science, Faculty of Computing and Information Technology, King Abdulaziz University</institution>, <addr-line>Jeddah, 21589</addr-line>, <country>Saudi Arabia</country></aff>
</contrib-group><author-notes><corresp id="cor1">&#x002A;Corresponding Author: Mohammad Arif Sobhan Bhuiyan. Email: 
<email>arifsobhan.bhuiyan@xmu.edu.my</email></corresp></author-notes>
<pub-date pub-type="epub" date-type="pub" iso-8601-date="2021-08-11">
<day>11</day>
<month>8</month>
<year>2021</year>
</pub-date>
<volume>30</volume>
<issue>3</issue>
<fpage>771</fpage>
<lpage>785</lpage>
<history>
<date date-type="received">
<day>22</day>
<month>2</month>
<year>2021</year>
</date>
<date date-type="accepted">
<day>09</day>
<month>5</month>
<year>2021</year>
</date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2021 Khan et al.</copyright-statement>
<copyright-year>2021</copyright-year>
<copyright-holder>Khan et al.</copyright-holder>
<license xlink:href="https://creativecommons.org/licenses/by/4.0/">
<license-p>This work is licensed under a <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="TSP_IASC_18039.pdf"></self-uri>
<abstract>
<p>Fungi disease affects walnut trees worldwide because it damages the canopies of the trees and can easily spread to neighboring trees, resulting in low quality and less yield. The fungal disease can be treated relatively easily, and the main goal is preventing its spread by automatic early-detection systems. Recently, machine learning techniques have achieved promising results in many applications in the agricultural field, including plant disease detection. In this paper, an automatic machine learning-based detection method for identifying walnut diseases is proposed. The proposed method first resizes a leaf&#x2019;s input image and pre-processes it using intensity adjustment and histogram equalization. After that, the detected infected area of the leaf is segmented using the Otsu thresholding algorithm. The proposed method extracts color and shape features from the leaf&#x2019;s segmented area using the gray level co-occurrence matrix (GLCM) and color moments. Finally, the extracted features are provided to the back-propagation neural network (BPNN) classifier to detect and classify walnut leaf diseases. Experimental results demonstrate that the proposed method&#x2019;s detection accuracy is 95.3%, which is significantly higher than those of the state-of-the-art techniques. The proposed method assists farmers in detecting diseases affecting walnut trees and thus enables them to generate more revenue by improving the productivity and quality of their walnuts.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>Machine learning</kwd>
<kwd>walnut disease</kwd>
<kwd>fungi disease</kwd>
<kwd>color image features</kwd>
<kwd>texture image features</kwd>
</kwd-group>
</article-meta>
</front>
<body>
<sec id="s1">
<label>1</label>
<title>Introduction</title>
<p>Agriculture plays a vital role in the economy and general development of a country. Particularly, in Pakistan, agriculture represents the second-largest branch of the economy, contributing to more than 21% of the gross domestic products (GDP), and 62% of the country&#x2019;s population earn their livelihood in the agriculture sector [<xref ref-type="bibr" rid="ref-1">1</xref>,<xref ref-type="bibr" rid="ref-2">2</xref>]. Currently, fungal diseases are a significant problem in the agriculture field worldwide, substantially decreasing the quantity and quality of agricultural products. Therefore, the fungal diseases that affect trees, particularly walnut trees, must be cured. The Khyber Pakhtunkhwa province of Pakistan has 226 indigenous types of walnuts, mainly infected by fungal diseases [<xref ref-type="bibr" rid="ref-3">3</xref>]. Walnut production declined sharply in Pakistan from 2000 to 2017 because tree diseases, such as anthracnose, leaf blotch, and bacteria blight, affected the walnut trees.</p>
<p>Anthracnose is the most well-known leaf disease of walnut trees, and it is caused by the fungus Gnomonia leptostyla. Gnomonia leptostyla spreads due to rain in the spring season. The anthracnose disease reduces the size, mass, and actual crop of nuts and causes premature leaf fall. The first symptom of the anthracnose disease on a walnut leaf is circular brown lesions on the leaves. Initially, this symptom is visible only on the leaf&#x2019;s underside, and it eventually spreads to both the upper and the lower leaf surfaces as time passes. Walnut anthracnose affects only walnuts and butternuts, which belong to the same genus of Juglans. There are also forms of anthracnose that can cause damage to maples, oaks, shade trees, and other plants, such as tomatoes, beans, cucumbers, and squash during the plant growing season [<xref ref-type="bibr" rid="ref-4">4</xref>,<xref ref-type="bibr" rid="ref-5">5</xref>].</p>
<p>Walnut leaf blotch is caused by the fungus Marssonina juglandis, which changes in severity yearly. Initially, small, round, brown spots of a few millimeters in size appear on a leaf surface, and then these spots merge into larger blotches. The symptoms of anthracnose and leaf blotch on a walnut leaf are shown in <xref ref-type="fig" rid="fig-1">Fig. 1</xref>.</p>
<p><xref ref-type="fig" rid="fig-1">Fig. 1</xref> shows the symptoms of anthracnose and leaf blotch on a walnut leaf.</p>
<fig id="fig-1">
<label>Figure 1</label>
<caption>
<title>(a) Leaf image containing the symptom of anthracnose (b) Leaf image containing the symptom of blotch</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_18039-fig-1.png"/>
</fig>
<p>Most of the existing walnut disease detection methods are based on naked-eye observation. However, the naked-eye-based detection of diseases is time-consuming and prone to errors. Therefore, it is crucial to detect the disease in time. An automatic walnut disease detection system can be used to detect the disease in its early stage. In recent years, machine learning techniques have played a significant role in the agricultural field. To improve the accuracy and speed of diagnostic results, many machine learning-based algorithms have been used, such as <italic>k</italic>-means clustering, support vector machine (SVM), <italic>K</italic>-nearest neighbor, naive Bayesian, and artificial neural networks (ANNs) [<xref ref-type="bibr" rid="ref-6">6</xref>,<xref ref-type="bibr" rid="ref-7">7</xref>]. This paper proposes an automatic detection method of walnut diseases, such as anthracnose and leaf blotch. The proposed method is deployed using a machine learning approach. The proposed model takes a walnut leaf image as an input to predict and identify diseases precisely.</p>
<p>The main contributions of this study can be summarized as follows:<list list-type="bullet"><list-item>
<p>A back-propagation neural network (BPNN) model is proposed to identify and classify walnut leaf diseases.</p></list-item><list-item>
<p>A leaf disease dataset, including the anthracnose and leaf blotch diseases, is designed to train and test the BPNN model.</p></list-item><list-item>
<p>The BPNN is compared with a multi-support vector machine (mSVM) in terms of accuracy, and the result shows that the BPNN model is comparatively better than the mSVM.</p></list-item></list></p>
</sec>
<sec id="s2">
<label>2</label>
<title>Related Work</title>
<p>Leaf disease identification has been a crucial problem and a significant concern in the agricultural sector for a long time. Machine learning algorithms have been widely used for disease detection in the agriculture field in recent years.</p>
<p>Khalesi et al. [<xref ref-type="bibr" rid="ref-8">8</xref>] used an automatic system to classify the Kaghazi and Sangi genotypes of Iranian walnuts. The features were extracted using the fast Fourier transform (FFT), and the principal component analysis (PCA) was applied to the extracted features. Finally, a multilayer feedforward neural network was used for classification. This method&#x2019;s detection accuracies of Sangi and Kaghazi genotypes were 99.64% and 96.56%, respectively.</p>
<p>Chuanlei et al. [<xref ref-type="bibr" rid="ref-6">6</xref>] developed an automatic method to diagnose apple leaf diseases using image processing techniques and pattern recognition methods. The RGB image was first converted to the HSI gray image as a pre-processing step. The image of an infected leaf was segmented by the region growing algorithm (RGA). Then, the RGA extracted features from the leaf image segmented area, including texture, shape, and color features. Finally, the SVM classifier was used for classification and detection, achieving high accuracy of 90%. Tigadi et al. [<xref ref-type="bibr" rid="ref-9">9</xref>] proposed an automatic method for detecting banana plant diseases, such as Yellow Sigatoka, Black Sigatoka, Panama Wilt, Bunchy top, and Streak virus, by applying the image processing techniques. Initially, images of banana leaves with various diseases were captured by a digital camera. The pre-processing techniques, such as image resizing, cropping, and color conversion, were used. Further, two types of features, the color features and the template&#x2019;s histogram (HOT), were extracted. Finally, the banana diseases were classified by the trained artificial neural network. Bhange et al. [<xref ref-type="bibr" rid="ref-10">10</xref>] used a web-based tool for the identification of pomegranate fruit disease. First, a leaf image was resized, and then features, such as morphology, color, and concave-convex variation (CCV), were extracted. Next, the disease area was segmented by the <italic>k</italic>-means cluster algorithm. Finally, the SVM was used for classification and detection. The proposed system achieved an accuracy of 82% in pomegranate disease identification. Waghmare et al. [<xref ref-type="bibr" rid="ref-11">11</xref>] proposed an automatic system for detecting major grape diseases, such as downy mildew and black rot, from a grape leaf image. 
The pre-processing steps were applied to the input image to make the image suitable for further processing. Additionally, the background was removed from the image, and the RGB color space was converted to the HSV color space. Further, the affected area was segmented from the leaf image, and the texture, color, shape, and edge features were extracted by the first- and second-order statistical methods and the gray level co-occurrence matrix (GLCM). Finally, the extracted features were processed by the SVM classifier for classification. Detection accuracy of 96% was achieved. Awate et al. [<xref ref-type="bibr" rid="ref-12">12</xref>] proposed a fruit disease detection and diagnosis method based on image processing techniques. In this method, the <italic>K</italic>-means clustering algorithm was used for image segmentation, and the color, morphology, and texture were extracted from the segmented image. Finally, an ANN-based classifier was used to identify and classify fruit diseases. Kusumandari et al. [<xref ref-type="bibr" rid="ref-13">13</xref>] presented a strawberry plant disease detection method. In their method, the input image quality was improved by pre-processing, and the RGB color space was converted into the HSV color space. After that, the regional method was used for the segmentation of the infected area of plant leaves. Detection accuracy of 85% was achieved.</p>
<p>Areni et al. [<xref ref-type="bibr" rid="ref-14">14</xref>] introduced an early detection image processing-based method of symptoms of pest attacks on cocoa fruits. First, the pre-processing step was conducted to enhance the input image quality and to convert the RGB model into the grayscale model. Further, image features were extracted using the Gabor kernel and stored in the database for the comparison of the test sample. The results show 70% accuracy on the testing dataset.</p>
</sec>
<sec id="s3">
<label>3</label>
<title>Proposed Method</title>
<p>The flowchart of the proposed method is shown in <xref ref-type="fig" rid="fig-2">Fig. 2</xref>. The proposed method uses a leaf image as an input and pre-processes it to enhance the image contrast. Next, it segments the leaf image using the image segmentation technique. The image features are then extracted from the segmented image and fed to the classifier to identify and categorize walnut leaf diseases. The proposed architecture consists of five modules. The details of each module are given in the following subsections.</p>
<fig id="fig-2">
<label>Figure 2</label>
<caption>
<title>Architecture of our proposed technique</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_18039-fig-2.png"/>
</fig>
<sec id="s3_1">
<label>3.1</label>
<title>Image Pre-processing Module</title>
<p>This module aims to resize an RGB leaf image, improve its contrast, and transform the enhanced RGB image into the YUV color space. The proposed method requires images of the same size, whereas raw input image size may vary. In addition, training performs faster on smaller images. Therefore, all raw images are resized to 256 &#x00D7; 256 pixels, as shown in <xref ref-type="fig" rid="fig-3">Fig. 3</xref>. After the image resizing process, two techniques, intensity adjustment and histogram equalization, are separately applied to enhance the image contrast. The results of the two enhancement techniques are shown in <xref ref-type="fig" rid="fig-4">Fig. 4</xref>, where it can be seen that the result of the histogram equalization technique is better than the intensity adjustment. Therefore, histogram equalization is adopted to eliminate the noise and enhance image features of the leaf&#x2019;s surface, such as lines, edges, and disease particles on the leaf. Next, the enhanced RGB color image is converted into a YUV color space using <xref ref-type="disp-formula" rid="eqn-1">Eqs. (1)</xref>&#x2013;<xref ref-type="disp-formula" rid="eqn-3">(3)</xref>. The results of the RGB-to-YUV color transformation of the walnut leaf images are presented in <xref ref-type="fig" rid="fig-5">Fig. 5</xref>.</p>

<fig id="fig-3">
<label>Figure 3</label>
<caption>
<title>Resize image</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_18039-fig-3.png"/>
</fig>
<fig id="fig-4">
<label>Figure 4</label>
<caption>
<title>(a) Result of intensity adjustment (b) Result of histogram equalization</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_18039-fig-4.png"/>
</fig>
<fig id="fig-5">
<label>Figure 5</label>
<caption>
<title>(a) YUV color image (b) Y channel (c) U channel (d) V channel</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_18039-fig-5.png"/>
</fig><p><disp-formula id="eqn-1">
<label>(1)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-1.png"/><tex-math id="tex-eqn-1"><![CDATA[$$\matrix{ {{\rm{Y}} = 0.299{\rm{R}} + 0.587{\rm{G}} + 0.114{\rm{B}} } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-1" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mrow><mml:mrow><mml:mi mathvariant="normal">Y</mml:mi></mml:mrow></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mn>0.299</mml:mn><mml:mrow><mml:mrow><mml:mi mathvariant="normal">R</mml:mi></mml:mrow></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mn>0.587</mml:mn><mml:mrow><mml:mrow><mml:mi mathvariant="normal">G</mml:mi></mml:mrow></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mn>0.114</mml:mn><mml:mrow><mml:mrow><mml:mi mathvariant="normal">B</mml:mi></mml:mrow></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p><disp-formula id="eqn-2">
<label>(2)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-2.png"/><tex-math id="tex-eqn-2"><![CDATA[$$\matrix{ {{\rm{U}} = 0.492\left( {{\rm{B}} - {\rm{Y}}} \right)} \cr }$$]]></tex-math>--><mml:math id="mml-eqn-2" display="block"><mml:mtable columnspacing="1em" rowspacing="4pt"><mml:mtr><mml:mtd><mml:mrow><mml:mrow><mml:mrow><mml:mi mathvariant="normal">U</mml:mi></mml:mrow></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mn>0.492</mml:mn><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mrow><mml:mi mathvariant="normal">B</mml:mi></mml:mrow></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mrow><mml:mi mathvariant="normal">Y</mml:mi></mml:mrow></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p><disp-formula id="eqn-3">
<label>(3)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-3.png"/><tex-math id="tex-eqn-3"><![CDATA[$$\matrix{ {{\rm{V}} = 0.877\left( {{\rm{R}} - {\rm{Y}}} \right) } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-3" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mrow><mml:mrow><mml:mi mathvariant="normal">V</mml:mi></mml:mrow></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mn>0.877</mml:mn><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mrow><mml:mi mathvariant="normal">R</mml:mi></mml:mrow></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mrow><mml:mi mathvariant="normal">Y</mml:mi></mml:mrow></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Image Segmentation Module</title>
<p>This module aims to segment the infected regions in the V channel of a YUV leaf image. Different methods, including the <italic>k</italic>-means clustering, fuzzy algorithm, region-based method, convolutional neural network, wavelet transform, and thresholding algorithm, have been used for image segmentation [<xref ref-type="bibr" rid="ref-15">15</xref>]. In the proposed method, the infected regions in a leaf image are segmented using the Otsu thresholding algorithm [<xref ref-type="bibr" rid="ref-16">16</xref>]. This algorithm returns a single intensity threshold that separates pixels into two classes: foreground and background [<xref ref-type="bibr" rid="ref-17">17</xref>,<xref ref-type="bibr" rid="ref-18">18</xref>]. The input image, that is, the V channel obtained from the YUV model, and its segmented image are shown in <xref ref-type="fig" rid="fig-6">Fig. 6</xref>.</p>
<fig id="fig-6">
<label>Figure 6</label>
<caption>
<title>(a) Input image and (b) Result of segmentation</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_18039-fig-6.png"/>
</fig>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Feature Extraction Module</title>
<p>In pattern recognition and image processing, feature extraction represents a special form of dimensionality reduction. The main goal of feature extraction is to obtain the most relevant information from the original data represented in a lower dimensionality space. The input data are transformed into a reduced representation set of features named the feature vector. In this process, relevant features are extracted from objects to form the feature vectors. Then, classifiers use the feature vectors to recognize the input unit with the target output unit. There are various features in the input images, and they are extracted to detect and classify leaf diseases. A feature often contains data relative to color, shape, context, or texture. In this paper, the color, texture, and shape features are extracted from the leaf images&#x2019; segmented affected area.</p>
<sec id="s3_3_1">
<label>3.3.1</label>
<title>Color Features</title>
<p>A color feature is one of the most widely used features in plant disease detection. The human vision system is more sensitive to color information than to the gray surface. The color of an image can be represented through a few color models. The most commonly used color models are RGB, HSV, and YUV. The color feature can be described by color histogram [<xref ref-type="bibr" rid="ref-19">19</xref>], color correlogram, and a color moment [<xref ref-type="bibr" rid="ref-20">20</xref>]. In this paper, the color moment is used to represent color features from the V channel of the YUV model. The color moments include the mean, standard deviation, skewness, variance, kurtosis, and inverse difference moment (IDM). The extracted color features of the image presented in <xref ref-type="fig" rid="fig-6">Fig. 6(b)</xref> are illustrated in <xref ref-type="table" rid="table-1">Tab. 1</xref>. Following are the features that are extracted from the input images.</p>
<table-wrap id="table-1">
<label>Table 1</label>
<caption>
<title>Color features extracted from <xref ref-type="fig" rid="fig-6">Fig. 6(b)</xref></title>
</caption>
<table>
<colgroup>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr>
<th>Features</th>
<th>Mean</th>
<th>S. Deviation</th>
<th>Skewness</th>
<th>Variance</th>
<th>Kurtosis</th>
<th>IDM</th>
<th>Smoothness</th>
<th>RMS</th>
</tr>
</thead>
<tbody>
<tr>
<td>Values</td>
<td>0.002</td>
<td>0.041</td>
<td>0.928</td>
<td>0.001</td>
<td>4.13</td>
<td>&#x2013;11.20</td>
<td>0.993</td>
<td>0.041</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The mean of an image denotes the average color in an image, which can be computed as follows:</p>
<p><disp-formula id="eqn-4">
<label>(4)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-4.png"/><tex-math id="tex-eqn-4"><![CDATA[$$\matrix{ {\mu = \sum\limits_{m,n = 0}^{N - 1} {{1 \over N}} \left( {{p_{mn}}} \right), } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-4" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>&#x03BC;</mml:mi><mml:mo>&#x003D;</mml:mo><mml:munderover><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mrow><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mi>N</mml:mi></mml:mfrac></mml:mrow></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>,</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <italic>N</italic> denotes the total number of pixels in the image, <inline-formula id="ieqn-1">
<!--<alternatives><inline-graphic xlink:href="ieqn-1.tif"/><tex-math id="tex-ieqn-1"><![CDATA[${p_{mn}}$]]></tex-math>--><mml:math id="mml-ieqn-1"><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> denotes the value of a pixel located at <inline-formula id="ieqn-2">
<!--<alternatives><inline-graphic xlink:href="ieqn-2.tif"/><tex-math id="tex-ieqn-2"><![CDATA[$(m,n)$]]></tex-math>--><mml:math id="mml-ieqn-2"><mml:mo stretchy="false">(</mml:mo><mml:mi>m</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math>
<!--</alternatives>--></inline-formula> of the image, and <inline-formula id="ieqn-3">
<!--<alternatives><inline-graphic xlink:href="ieqn-3.tif"/><tex-math id="tex-ieqn-3"><![CDATA[$\mu$]]></tex-math>--><mml:math id="mml-ieqn-3"><mml:mi>&#x03BC;</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> represents the mean value.</p>
<p><bold>Standard deviation:</bold> The variation or deviation between the pixels of an input image is represented by the standard deviation, which is computed by taking the square root of the variance of the color distribution as follows:</p>
<p><disp-formula id="eqn-5">
<label>(5)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-5.png"/><tex-math id="tex-eqn-5"><![CDATA[$$\matrix{ {\sigma = \sqrt {{1 \over N}\sum\limits_{m,n = 0}^{N - 1} {{{({p_{mn}} - \mu )}^2}} } , } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-5" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>&#x03C3;</mml:mi><mml:mo>&#x003D;</mml:mo><mml:msqrt><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mi>N</mml:mi></mml:mfrac></mml:mrow><mml:munderover><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>&#x03BC;</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:mrow></mml:msqrt><mml:mo>,</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <italic>&#x03C3;</italic> denotes the standard deviation.</p>
<p>The skewness is a measure of the degree of asymmetry of the probability distribution about the mean, which provides information on the shape of the color distribution. It can be computed as follows:</p>
<p><disp-formula id="eqn-6">
<label>(6)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-6.png"/><tex-math id="tex-eqn-6"><![CDATA[$$\matrix{ {S = {1 \over {{\sigma ^3}}}\sum\limits_{m,n = 0}^{N - 1} {{{\left( {{p_{mn}} - \mu } \right)}^3}} . } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-6" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>S</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mrow><mml:msup><mml:mi>&#x03C3;</mml:mi><mml:mn>3</mml:mn></mml:msup></mml:mrow></mml:mrow></mml:mfrac></mml:mrow><mml:munderover><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>&#x03BC;</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mn>3</mml:mn></mml:msup></mml:mrow></mml:mrow><mml:mo>.</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>The variance is used as a measure of the gray level contrast to establish the relative component descriptors and is calculated as follows:</p>
<p><disp-formula id="eqn-7">
<label>(7)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-7.png"/><tex-math id="tex-eqn-7"><![CDATA[$$\matrix{ {\sigma _m^2 = \sum\limits_{m,n = 0}^{N - 1} {{p_{mn}}} {{\left( {m - {\mu _m}} \right)}^2},\sigma _n^2 = \sum\limits_{m,n = 0}^{N - 1} {{p_{mn}}} {{\left( {n - {\mu _n}} \right)}^2}. } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-7" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:msubsup><mml:mi>&#x03C3;</mml:mi><mml:mi>m</mml:mi><mml:mn>2</mml:mn></mml:msubsup><mml:mo>&#x003D;</mml:mo><mml:munderover><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mrow><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mi>m</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow><mml:mo>,</mml:mo><mml:msubsup><mml:mi>&#x03C3;</mml:mi><mml:mi>n</mml:mi><mml:mn>2</mml:mn></mml:msubsup><mml:mo>&#x003D;</mml:mo><mml:munderover><mml:mo 
movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mrow><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mi>n</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow><mml:mo>.</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>The kurtosis denotes a measure of the peak value of the real-valued random variable; it shapes the descriptor of a probability distribution and can be calculated as follows:</p>
<p><disp-formula id="eqn-8">
<label>(8)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-8.png"/><tex-math id="tex-eqn-8"><![CDATA[$$\matrix{ {K = {1 \over {{\sigma ^4}}}\sum\limits_{m,n = 0}^{N - 1} {{{\left( {{p_{mn}} - \mu } \right)}^4}} . } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-8" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>K</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mrow><mml:msup><mml:mi>&#x03C3;</mml:mi><mml:mn>4</mml:mn></mml:msup></mml:mrow></mml:mrow></mml:mfrac></mml:mrow><mml:munderover><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mi>n</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>&#x03BC;</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mn>4</mml:mn></mml:msup></mml:mrow></mml:mrow><mml:mo>.</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>The <italic>IDM</italic> is inversely related to the contrast measure. For similar pixel values, the IDM value is high. Its value can be calculated as follows:</p>
<p><disp-formula id="eqn-9">
<label>(9)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-9.png"/><tex-math id="tex-eqn-9"><![CDATA[$$\matrix{ {IDM = \sum\limits_m {\sum\limits_n {{1 \over {1 + {{\left( {m - n} \right)}^2}}}} } p\left( {m,n} \right). } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-9" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>I</mml:mi><mml:mi>D</mml:mi><mml:mi>M</mml:mi><mml:mo>&#x003D;</mml:mo><mml:munder><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mi>m</mml:mi></mml:munder><mml:mrow><mml:munder><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mi>n</mml:mi></mml:munder><mml:mrow><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>n</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:mrow></mml:mfrac></mml:mrow></mml:mrow></mml:mrow><mml:mi>p</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>m</mml:mi><mml:mo>,</mml:mo><mml:mi>n</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>.</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
</sec>
<sec id="s3_3_2">
<label>3.3.2</label>
<title>Texture Features</title>
<p>Texture feature is an important low-level feature that divides an image into regions of interest (ROIs) for classification. It provides more details about a specific region in the image. Several methods can be used to describe the main texture features, such as coarseness and regularity. The GLCM measure is one of the most important measures that can be used to describe the texture and to estimate the spatial dependency of the gray level of an image [<xref ref-type="bibr" rid="ref-21">21</xref>]. In this paper, the texture features, including the contrast, correlation, energy, homogeneity, and entropy, are extracted from the walnut leaf through the GLCM. The GLCM features extracted from the image shown in <xref ref-type="fig" rid="fig-6">Fig. 6(b)</xref> are given in <xref ref-type="table" rid="table-2">Tab. 2</xref>. The formal definitions of the texture features used in this work are as follows.</p>
<table-wrap id="table-2">
<label>Table 2</label>
<caption>
<title>The features extracted from <xref ref-type="fig" rid="fig-7">Fig. 7(b)</xref> on the different angles (0&#x00B0;, 45&#x00B0;, 90&#x00B0;, and 135&#x00B0;)</title>
</caption>
<table>
<colgroup>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr>
<th>Angles</th>
<th>Contrast</th>
<th>Correlation</th>
<th>Energy</th>
<th>Homogeneity</th>
<th>Entropy</th>
</tr>
</thead>
<tbody>
<tr>
<td>0&#x00B0;</td>
<td>0.0505821</td>
<td>0.8377366</td>
<td>0.6402496</td>
<td>0.9747089</td>
<td>2.8055792</td>
</tr>
<tr>
<td>45&#x00B0;</td>
<td>0.0478247</td>
<td>0.8227010</td>
<td>0.6847217</td>
<td>0.9760876</td>
<td>2.5362953</td>
</tr>
<tr>
<td>90&#x00B0;</td>
<td>0.0763174</td>
<td>0.7559139</td>
<td>0.6168412</td>
<td>0.9618412</td>
<td>2.8088727</td>
</tr>
<tr>
<td>135&#x00B0;</td>
<td>0.0495098</td>
<td>0.8413201</td>
<td>0.6409327</td>
<td>0.9752450</td>
<td>2.8066372</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The contrast measures the intensity between a pixel and its neighboring pixel over the whole image, and it is considered to be zero for a constant image; it is also known as a variance or a moment of inertia and is computed as follows:</p>
<p><disp-formula id="eqn-10">
<label>(10)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-10.png"/><tex-math id="tex-eqn-10"><![CDATA[$$\matrix{ {Contrast = \sum\limits_{i,j = 0}^{N - 1} {{G_{i,j}}{{\left( {i - j} \right)}^2}} ; } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-10" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>C</mml:mi><mml:mi>o</mml:mi><mml:mi>n</mml:mi><mml:mi>t</mml:mi><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi><mml:mo>&#x003D;</mml:mo><mml:munderover><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mrow><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:mrow><mml:mo>;</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <italic>G</italic><sub><italic>i,j</italic></sub> represents the GLCM, and <inline-formula id="ieqn-4">
<!--<alternatives><inline-graphic xlink:href="ieqn-4.tif"/><tex-math id="tex-ieqn-4"><![CDATA[$i$]]></tex-math>--><mml:math id="mml-ieqn-4"><mml:mi>i</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> and <inline-formula id="ieqn-5">
<!--<alternatives><inline-graphic xlink:href="ieqn-5.tif"/><tex-math id="tex-ieqn-5"><![CDATA[$j$]]></tex-math>--><mml:math id="mml-ieqn-5"><mml:mi>j</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> represent the gray value of a pixel on row and column, respectively.</p>
<p>Correlation represents the estimation of the correlation of a pixel and its neighboring pixel over the entire image and can be computed as follows:</p>
<p><disp-formula id="eqn-11">
<label>(11)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-11.png"/><tex-math id="tex-eqn-11"><![CDATA[$$\matrix{ {{\rm{Correlation}} = \sum\limits_{i,j = 0}^{N - 1} {{G_{i,j}}} \left[ {{{\left( {i - {\mu _i}} \right)\left( {j - {\mu _j}} \right)} \over {\sqrt {\left( {\sigma _i^2} \right)\left( {\sigma _j^2} \right)} }}} \right]; } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-11" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mrow><mml:mrow><mml:mi mathvariant="normal">C</mml:mi><mml:mi mathvariant="normal">o</mml:mi><mml:mi mathvariant="normal">r</mml:mi><mml:mi mathvariant="normal">r</mml:mi><mml:mi mathvariant="normal">e</mml:mi><mml:mi mathvariant="normal">l</mml:mi><mml:mi mathvariant="normal">a</mml:mi><mml:mi mathvariant="normal">t</mml:mi><mml:mi mathvariant="normal">i</mml:mi><mml:mi mathvariant="normal">o</mml:mi><mml:mi mathvariant="normal">n</mml:mi></mml:mrow></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:munderover><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mrow><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mrow><mml:mfrac><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>j</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:msqrt><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msubsup><mml:mi>&#x03C3;</mml:mi
><mml:mi>i</mml:mi><mml:mn>2</mml:mn></mml:msubsup></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:msubsup><mml:mi>&#x03C3;</mml:mi><mml:mi>j</mml:mi><mml:mn>2</mml:mn></mml:msubsup></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:msqrt></mml:mrow></mml:mfrac></mml:mrow></mml:mrow><mml:mo>]</mml:mo></mml:mrow><mml:mo>;</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where, <inline-formula id="ieqn-6">
<!--<alternatives><inline-graphic xlink:href="ieqn-6.tif"/><tex-math id="tex-ieqn-6"><![CDATA[${\mu _i}$]]></tex-math>--><mml:math id="mml-ieqn-6"><mml:mrow><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> and <inline-formula id="ieqn-7">
<!--<alternatives><inline-graphic xlink:href="ieqn-7.tif"/><tex-math id="tex-ieqn-7"><![CDATA[${\mu _j}$]]></tex-math>--><mml:math id="mml-ieqn-7"><mml:mrow><mml:msub><mml:mi>&#x03BC;</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> correspond to the averages of row i and column j, respectively, and <inline-formula id="ieqn-8">
<!--<alternatives><inline-graphic xlink:href="ieqn-8.tif"/><tex-math id="tex-ieqn-8"><![CDATA[${\sigma _i}$]]></tex-math>--><mml:math id="mml-ieqn-8"><mml:mrow><mml:msub><mml:mi>&#x03C3;</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> and <inline-formula id="ieqn-9">
<!--<alternatives><inline-graphic xlink:href="ieqn-9.tif"/><tex-math id="tex-ieqn-9"><![CDATA[${\sigma _j}$]]></tex-math>--><mml:math id="mml-ieqn-9"><mml:mrow><mml:msub><mml:mi>&#x03C3;</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:math>
<!--</alternatives>--></inline-formula> correspond to the standard deviations of row i and column j, respectively.</p>
<p><bold>Energy</bold> denotes the sum of squared elements in the GLCM, and by default, it is one for a constant image. The energy is also known as the angular second moment, and it is calculated by</p>
<p><disp-formula id="eqn-12">
<label>(12)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-12.png"/><tex-math id="tex-eqn-12"><![CDATA[$$\matrix{ {Energy = \sum\limits_{i,j = 0}^{N - 1} G {{\left( {i,j} \right)}^2}. } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-12" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>E</mml:mi><mml:mi>n</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>g</mml:mi><mml:mi>y</mml:mi><mml:mo>&#x003D;</mml:mo><mml:munderover><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mi>G</mml:mi><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow><mml:mo>.</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p><bold>Homogeneity</bold> is a measure of the closeness of the distribution of elements in the GLCM to the GLCM diagonal. The homogeneity is calculated by</p>
<p><disp-formula id="eqn-13">
<label>(13)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-13.png"/><tex-math id="tex-eqn-13"><![CDATA[$$\matrix{ {Homogeneity = \sum\limits_{i,j = 0}^{N - 1} {{{{G_{i,j}}} \over {1 + {{\left( {i - j} \right)}^2}}}} } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-13" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>H</mml:mi><mml:mi>o</mml:mi><mml:mi>m</mml:mi><mml:mi>o</mml:mi><mml:mi>g</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:mi>e</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>y</mml:mi><mml:mo>&#x003D;</mml:mo><mml:munderover><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mrow><mml:mrow><mml:mfrac><mml:mrow><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:mrow></mml:mfrac></mml:mrow></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p><bold>Entropy</bold> is a measure of image complexity, and it also measures the disorder of the GLCM. The value of entropy is calculated by</p>
<p><disp-formula id="eqn-14">
<label>(14)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-14.png"/><tex-math id="tex-eqn-14"><![CDATA[$$\matrix{ {\sum\limits_{i,j = 0}^{N - 1} {{G_{i,j}}} \left( { - ln\;{G_{i,j}}} \right) } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-14" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:munderover><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mrow><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi>l</mml:mi><mml:mi>n</mml:mi><mml:mrow><mml:msub><mml:mi>G</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
</sec>
<sec id="s3_3_3">
<label>3.3.3</label>
<title>Shape Features</title>
<p>Shape features are significant because they describe an object in an image using its most important characteristics. The shape is one of the most important features for detecting infected walnut leaf images. In the walnut leaf disease images, it can be seen that the shapes of various types of diseases differ significantly. After the image segmentation process, the proposed model obtains the infected regions of the target disease, and then the areas of the infected regions are computed.</p>
<p>The percentage of the infected area of the walnut leaf image is the ratio of the area of the infected region in the leaf to that of the whole leaf. The following equation is used for computing the percentage of the infected region in a walnut leaf image.</p>
<p><disp-formula id="eqn-15">
<label>(15)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-15.png"/><tex-math id="tex-eqn-15"><![CDATA[$$\matrix{ {infecte{d_{area}} = \left( {{{A1} \over {A2}}} \right) \times 100,} \cr }$$]]></tex-math>--><mml:math id="mml-eqn-15" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>f</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:msub><mml:mi>d</mml:mi><mml:mrow><mml:mi>a</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>a</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mfrac><mml:mrow><mml:mi>A</mml:mi><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>A</mml:mi><mml:mn>2</mml:mn></mml:mrow></mml:mfrac></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x00D7;</mml:mo><mml:mn>100</mml:mn><mml:mo>,</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where, <italic>A</italic>1 and <italic>A</italic>2 denote the total number of white pixels in a leaf&#x2019;s infected area and the total number of pixels in the whole leaf image, respectively.</p>
</sec>
</sec>
<sec id="s3_4">
<label>3.4</label>
<title>BPNN Model</title>
<p>A BPNN is the most widely used neural network type for classification and prediction. The back-propagation algorithm searches for the minimum value of the error function in the weight space, utilizing gradient descent. The BPNN is a multilayer network consisting of an input layer, one or more hidden layers, and an output layer. Further, the BPNN plays an active role in agriculture disease recognition, and significant results have been achieved in this field [<xref ref-type="bibr" rid="ref-22">22</xref>,<xref ref-type="bibr" rid="ref-23">23</xref>]. In this work, the BPNN algorithm is used for walnut disease identification and classification.</p>
<p>The BPNN model is trained using a dataset comprising 70% of all images. Various techniques, including pre-processing, segmentation, and feature extraction, are applied to all images, and a feature vector presented in <xref ref-type="table" rid="table-3">Tab. 3</xref> is obtained. The feature vector is fed to the network input layer. The input layer consists of 14 neurons; this number of neurons is used because 14 features are used as the input data. After being processed by the input layer, the features are fed to the hidden layer that consists of 50 neurons. The sigmoid activation function is used in the output layer. The structure of the BPNN is shown in <xref ref-type="fig" rid="fig-7">Fig. 7</xref>.</p>
<table-wrap id="table-3">
<label>Table 3</label>
<caption>
<title>Image features</title>
</caption>
<table>
<colgroup>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr><th rowspan="2" colspan="2">Features</th><th colspan="5">Images</th>
</tr>
<tr>
<th>1</th>
<th>2</th>
<th>&#x2013;</th>
<th>&#x2013;</th>
<th>3670</th>
</tr>
</thead>
<tbody>
<tr>
<td>Texture</td>
<td><bold>1</bold></td>
<td>0.021354</td>
<td>0.025306</td>
<td><bold>&#x2013;</bold></td>
<td><bold>&#x2013;</bold></td>
<td>0.02307</td>
</tr>
<tr><td/>
<td><bold>&#x2013;</bold></td>
<td>0.923652</td>
<td>0.894944</td>
<td><bold>&#x2013;</bold></td>
<td><bold>&#x2013;</bold></td>
<td>0.552142</td>
</tr>
<tr><td/>
<td><bold>&#x2013;</bold></td>
<td>0.699408</td>
<td>0.73445</td>
<td><bold>&#x2013;</bold></td>
<td><bold>&#x2013;</bold></td>
<td>0.925952</td>
</tr>
<tr><td/>
<td><bold>5</bold></td>
<td>2.605831</td>
<td>2.378717</td>
<td><bold>&#x2013;</bold></td>
<td><bold>&#x2013;</bold></td>
<td>0.707659</td>
</tr>
<tr>
<td>Color</td>
<td><bold>6</bold></td>
<td>0.005924</td>
<td>&#x2212;0.00723</td>
<td><bold>&#x2013;</bold></td>
<td><bold>&#x2013;</bold></td>
<td>&#x2013;0.02551</td>
</tr>
<tr><td/>
<td><bold>&#x2013;</bold></td>
<td>0.03457</td>
<td>0.056647</td>
<td></td>
<td></td>
<td>0.02104</td>
</tr>
<tr><td/>
<td><bold>&#x2013;</bold></td>
<td>1.239079</td>
<td>1.330501</td>
<td></td>
<td></td>
<td>1.009094</td>
</tr>
<tr><td/>
<td><bold>13</bold></td>
<td>&#x2013;6.19647</td>
<td>&#x2013;5.41792</td>
<td></td>
<td></td>
<td>&#x2013;20.2083</td>
</tr>
<tr>
<td>Shape</td>
<td><bold>14</bold></td>
<td>0.150005</td>
<td>0.15184</td>
<td></td>
<td></td>
<td>0.001502</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig id="fig-7">
<label>Figure 7</label>
<caption>
<title>Structure of the BPNN</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_18039-fig-7.png"/>
</fig>
<p>In the training process, the maximum number of training epochs was set to 1000, the inertia coefficient was set to 0.8, and the learning efficiency was set to 0.01.</p>
<p>The weighted output (<italic>X<sub>i</sub>W<sub>ij</sub></italic>) of neuron <italic>i</italic> in the hidden layer is added to the bias value of neuron <italic>j</italic> in the output layer (𝜃<sub>𝑗</sub>) to obtain the net input of neuron <italic>j</italic> in the output layer (<italic>I<sub>j</sub></italic>), which is expressed as:</p>
<p><disp-formula id="eqn-16">
<label>(16)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-16.png"/><tex-math id="tex-eqn-16"><![CDATA[$$\matrix{ {{I_j} = \sum\limits_n {{W_{ij}}} {X_i} + {\theta _j}, } \cr }$$]]></tex-math>--><mml:math id="mml-eqn-16" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mrow><mml:msub><mml:mi>I</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:munder><mml:mo movablelimits="false">&#x2211;</mml:mo><mml:mi>n</mml:mi></mml:munder><mml:mrow><mml:mrow><mml:msub><mml:mi>W</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mrow><mml:mrow><mml:msub><mml:mi>X</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:msub><mml:mi>&#x03B8;</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow><mml:mo>,</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <italic>X<sub>i</sub></italic> represents the input data of the output layer, which is the output data of the hidden layer; <italic>W<sub>ij</sub></italic> represents the weight value of the connection between neuron <italic>i</italic> in the hidden layer and neuron <italic>j</italic> in the output layer, and <italic>&#x03B8;</italic><sub><italic>j</italic></sub> represents the bias value of neuron <italic>j</italic>.</p>
<p>The input <italic>I<sub>j</sub></italic> passes through an activation function <italic>f</italic> of the output layer to produce the desired network output Z<sub><italic>j</italic></sub>. In the present work, the sigmoid activation function has been used as an activation function, and it is expressed by</p>
<p><disp-formula id="eqn-17">
<label>(17)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-17.png"/><tex-math id="tex-eqn-17"><![CDATA[$$\matrix{ {f\left( {{I_j}} \right) = {1 \over {1 + {e^{ - {I_j}}}}}. }}$$]]></tex-math>--><mml:math id="mml-eqn-17" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>f</mml:mi><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:msub><mml:mi>I</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x003D;</mml:mo><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x002B;</mml:mo><mml:mrow><mml:msup><mml:mi>e</mml:mi><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:msub><mml:mi>I</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:mrow></mml:msup></mml:mrow></mml:mrow></mml:mfrac></mml:mrow><mml:mo>.</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<table-wrap id="table-4">
<label>Table 4</label>
<caption>
<title>Number of images with class labels</title>
</caption>
<table>
<colgroup>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
</tr>
</thead>
<tbody>
<tr>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The proposed model was developed using the <italic>MATLAB</italic><sup>&#x00AE;</sup> 2016 software environment. The trained network was stored in the form of <italic>.net1</italic> file in <italic>MATLAB</italic><sup>&#x00AE;</sup> 2016. The trained network was tested using the test dataset.</p>
</sec>
<sec id="s3_5">
<label>3.5</label>
<title>mSVM</title>
<p>The SVM is one of the most popular supervised machine learning algorithms for object detection and image classification [<xref ref-type="bibr" rid="ref-24">24</xref>]. The SVM divides the training dataset into two classes and forms the optimum separating hyperplanes. The feature vector of images in the first class lies on one side of the hyperplane, and the feature vector of images in the second class lies on the opposite side of the hyperplane. The number of hyperplanes in the SVM depends on the number of classes. In this paper, the mSVM is used for walnut leaf disease detection and classification using a linear kernel function. The number of iterations was set to 500 during the training process of the mSVM. The same process as that used for training the BPNN is applied to the training dataset to obtain the feature vector, which is presented in <xref ref-type="table" rid="table-3">Tab. 3</xref>. The feature vector was used as an input to the mSVM. By applying the mSVM, the feature vector was used to classify images into three classes: infected images I, infected images II, and non-infected images. The training set of the proposed model included images of three classes. The first class included the images of walnut leaves infected by the anthracnose, the second class included the images of walnut leaves infected by the leaf blotch disease, and the third class included images of healthy walnut leaves. The first, second, and third classes were labeled as 1, 2, and 3, respectively, as presented in <xref ref-type="table" rid="table-4">Tab. 4</xref>. The training set contained 70% of all the images of walnut leaves.</p>
</sec>
</sec>
<sec id="s4">
<label>4</label>
<title>Results and Discussions</title>
<p>The walnut leaf images were processed, and the image features were extracted using the GLCM and the color moment. The extracted features were fed to the classifier to predict the walnut leaf diseases, namely, the anthracnose and blotch diseases. The collection of images for model development was the foremost and crucial task in this work. Images of infected walnut leaves were captured on the black background using a high-resolution camera, having a 180-dpi resolution. The images were stored in a JPG or PNG format. First, an infected leaf was put on the black background under an appropriate light source. To improve the image&#x2019;s view and brilliance, it was ensured that reflection was eliminated and that the light was uniformly dispersed. The leaf images were appropriately zoomed to ensure that an image included a leaf and the background. The total data consisted of 3670 images of walnut leaves with a resolution of <inline-formula id="ieqn-10">
<!--<alternatives><inline-graphic xlink:href="ieqn-10.tif"/><tex-math id="tex-ieqn-10"><![CDATA[$512 \times 512$]]></tex-math>--><mml:math id="mml-ieqn-10"><mml:mn>512</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:mn>512</mml:mn></mml:math>
<!--</alternatives>--></inline-formula> pixels; 70% of all images were used for training, and the remaining 30% of images were used for the test. The overall data included 2415 images of the walnut leaves with the anthracnose disease, 740 images of the walnut leaves with the blotch disease, and 515 images of healthy walnut leaves. After the model training with the training dataset, the trained model was tested using the test dataset to verify its ability to recognize walnut leaf diseases.</p>
<sec id="s4_1">
<label>4.1</label>
<title>Classifier Performance Analysis</title>
<p>The detection accuracy was used to test the trained network&#x2019;s performance on the test set, and it was calculated by:</p>
<p><disp-formula id="eqn-18">
<label>(18)</label>
<!--<alternatives>
<graphic mimetype="image" mime-subtype="png" xlink:href="eqn-18.png"/><tex-math id="tex-eqn-18"><![CDATA[$$\matrix{ {Accuracy = {{TP \;+\; TN} \over {Total}},} \cr }$$]]></tex-math>--><mml:math id="mml-eqn-18" display="block"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mrow><mml:mi>A</mml:mi><mml:mi>c</mml:mi><mml:mi>c</mml:mi><mml:mi>u</mml:mi><mml:mi>r</mml:mi><mml:mi>a</mml:mi><mml:mi>c</mml:mi><mml:mi>y</mml:mi><mml:mo>&#x003D;</mml:mo><mml:mrow><mml:mfrac><mml:mrow><mml:mi>T</mml:mi><mml:mi>P</mml:mi><mml:mo>&#x002B;</mml:mo><mml:mi>T</mml:mi><mml:mi>N</mml:mi></mml:mrow><mml:mrow><mml:mi>T</mml:mi><mml:mi>o</mml:mi><mml:mi>t</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi></mml:mrow></mml:mfrac></mml:mrow><mml:mo>,</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math>
<!--</alternatives>--></disp-formula></p>
<p>where <inline-formula id="ieqn-11">
<!--<alternatives><inline-graphic xlink:href="ieqn-11.tif"/><tex-math id="tex-ieqn-11"><![CDATA[$TP$]]></tex-math>--><mml:math id="mml-ieqn-11"><mml:mi>T</mml:mi><mml:mi>P</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> denotes the number of true positives, <inline-formula id="ieqn-12">
<!--<alternatives><inline-graphic xlink:href="ieqn-12.tif"/><tex-math id="tex-ieqn-12"><![CDATA[$TN$]]></tex-math>--><mml:math id="mml-ieqn-12"><mml:mi>T</mml:mi><mml:mi>N</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> denotes the number of true negatives, and <inline-formula id="ieqn-13">
<!--<alternatives><inline-graphic xlink:href="ieqn-13.tif"/><tex-math id="tex-ieqn-13"><![CDATA[$total$]]></tex-math>--><mml:math id="mml-ieqn-13"><mml:mi>t</mml:mi><mml:mi>o</mml:mi><mml:mi>t</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi></mml:math>
<!--</alternatives>--></inline-formula> is the total number of test images.</p>
<p><bold>BPNN classifier:</bold> The accuracy of the BPNN is presented in <xref ref-type="fig" rid="fig-8">Fig. 8</xref>, where it can be seen that the BPNN achieved the highest accuracy of 97.8% for anthracnose disease, while the accuracy values for the other two image classes were lower. The overall accuracy of the BPNN in the walnut leaf disease detection was 95.3%. The accuracy values of the BPNN for the three image classes are given in <xref ref-type="table" rid="table-5">Tab. 5</xref>.</p>
<table-wrap id="table-5">
<label>Table 5</label>
<caption>
<title>The recognition rate of the BPNN</title>
</caption>
<table>
<colgroup>
<col/>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr>
<th>Disease</th>
<th>Anthracnose</th>
<th>Leaf blotch</th>
<th>Healthy leaf</th>
</tr>
</thead>
<tbody>
<tr>
<td>BPNN</td>
<td>97.8%</td>
<td>95.6%</td>
<td>93.3%</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="table-6">
<label>Table 6</label>
<caption>
<title>The recognition rate of mSVM</title>
</caption>
<table>
<colgroup>
<col/>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr>
<th>Diseases</th>
<th>Anthracnose</th>
<th>Leaf blotch</th>
<th>Healthy leaf</th>
</tr>
</thead>
<tbody>
<tr>
<td>mSVM</td>
<td>95.6%</td>
<td>91%</td>
<td>86.7%</td>
</tr>
</tbody>
</table>
</table-wrap>
<p><bold>Multiclass SVM:</bold> The accuracy of the mSVM in the walnut leaf disease detection is shown in <xref ref-type="fig" rid="fig-9">Fig. 9</xref>. The overall accuracy of mSVM was 91.1%. The accuracy values of the mSVM for the three image classes are given in <xref ref-type="table" rid="table-6">Tab. 6</xref>.</p>
<fig id="fig-8">
<label>Figure 8</label>
<caption>
<title>Accuracy of BPNN</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_18039-fig-8.png"/>
</fig>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>Comparison of BPNN and mSVM</title>
<p>The accuracies of the two classifiers expressed in percentage are given in <xref ref-type="table" rid="table-7">Tab. 7</xref>. The recognition rates of the BPNN classifier for the anthracnose, leaf blotch and healthy leaves were 97.8%, 95.6%, and 93.33%, respectively; the overall accuracy of the BPNN was 95.3%. The recognition rates of the mSVM for the anthracnose, leaf blotch and healthy leaves were 95.6%, 91%, and 86.7%, respectively. The overall accuracy of the mSVM was 91.1%. The accuracy comparison of the two models is shown in <xref ref-type="fig" rid="fig-10">Fig. 10</xref>, where it can be seen that the BPNN outperformed the mSVM. This could be because the BPNN learned the loss function parameters and changed their values in each iteration. In contrast, in the mSVM, the detection result was mostly based on a fixed value and could not be changed by calculating the error rate. Furthermore, the BP neural networks had strong self-learning and self-adaptive abilities and fast calculation speeds for large samples, which makes them well suited for building prediction models for walnut leaf diseases. Due to the reasons mentioned above, the BPNN model result was comparatively better than that of the mSVM.</p>
<fig id="fig-9">
<label>Figure 9</label>
<caption>
<title>Accuracy of mSVM</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_18039-fig-9.png"/>
</fig>
<fig id="fig-10">
<label>Figure 10</label>
<caption>
<title>Comparison of classifiers</title>
</caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_18039-fig-10.png"/>
</fig>
<table-wrap id="table-7">
<label>Table 7</label>
<caption>
<title>Performance analysis of classifiers</title>
</caption>
<table>
<colgroup>
<col/>
<col/>
<col/>
</colgroup>
<thead>
<tr>
<th>Diseases</th>
<th>mSVM</th>
<th>BPNN</th>
</tr>
</thead>
<tbody>
<tr>
<td>Anthracnose</td>
<td>95.6%</td>
<td>97.8%</td>
</tr>
<tr>
<td>Leaf blotch</td>
<td>91.66%</td>
<td>95.6%</td>
</tr>
<tr>
<td>Healthy</td>
<td>86.7%</td>
<td>93.3%</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
</sec>
<sec id="s5">
<label>5</label>
<title>Conclusion</title>
<p>This paper proposes a machine learning-based method for image-based detection and classification of fungi diseases, such as anthracnose and leaf blotch in walnut leaves. In the proposed method, the input image is pre-processed first, and then the RGB image is converted to the YUV color space. Next, the image is segmented by the Otsu thresholding algorithm, and the color and texture features and an affected area are extracted from the segmented images. The proposed model is trained using 70% of all images and tested using the remaining 30% of the images. In order to illustrate the applicability and performance of the proposed detection model, it is compared with the mSVM model. The experimental results prove the superiority of the BPNN over the mSVM in walnut leaf disease identification and classification. Farmers can use our proposed method for the detection of walnut diseases. Besides the two machine learning-based models analyzed in this study, several other deep learning-based methods have been used in the related literature [<xref ref-type="bibr" rid="ref-25">25</xref>&#x2013;<xref ref-type="bibr" rid="ref-29">29</xref>]. In future studies, the proposed BPNN model will be compared with these methods.</p>
</sec>
</body>
<back><fn-group>
<fn fn-type="other">
<p><bold>Funding Statement:</bold> This research was financially supported by the Xiamen University Malaysia (Project code: [XMUMRF/2018-C2/IECE/0002]).</p>
</fn>
<fn fn-type="conflict">
<p><bold>Conflicts of Interest:</bold> The authors declare that they have no conflicts of interest to report regarding the present study.</p>
</fn>
</fn-group>
<ref-list content-type="authoryear">
<title>References</title>
<ref id="ref-1">
<label>1</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>A.</given-names> 
<surname>Rehman</surname></string-name>, <string-name>
<given-names>L.</given-names> 
<surname>Jingdong</surname></string-name>, <string-name>
<given-names>B.</given-names> 
<surname>Shahzad</surname></string-name>, <string-name>
<given-names>A. A.</given-names> 
<surname>Chandio</surname></string-name>, <string-name>
<given-names>I.</given-names> 
<surname>Hussain</surname></string-name> <etal>et al.</etal>
</person-group><italic>,</italic> &#x201C;
<article-title>Economic perspectives of major field crops of Pakistan: An empirical study</article-title>,&#x201D; 
<source>Pacific Science Review B: Humanities and Social Sciences</source>, vol. 
<volume>1</volume>, no. 
<issue>3</issue>, pp. 
<fpage>145</fpage>&#x2013;
<lpage>158</lpage>, 
<year>2015</year>.</mixed-citation>
</ref>
<ref id="ref-2">
<label>2</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>A.</given-names> 
<surname>Azam</surname></string-name> and <string-name>
<given-names>M.</given-names> 
<surname>Shafique</surname></string-name>
</person-group>, &#x201C;
<article-title>Agriculture in Pakistan and its impact on economy. A review</article-title>,&#x201D; 
<source>International Journal of Advanced Science and Technology</source>, vol. 
<volume>103</volume>, pp. 
<fpage>47</fpage>&#x2013;
<lpage>60</lpage>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-3">
<label>3</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>M.</given-names> 
<surname>Sameeullah</surname></string-name> and <string-name>
<given-names>T.</given-names> 
<surname>Karaden&#x0130;z</surname></string-name>
</person-group>, &#x201C;
<article-title>Walnut production status in Pakistan</article-title>,&#x201D; 
<source>Bah&#x00E7;e</source>, vol. 
<volume>46</volume>, no. 
<issue>2</issue>, pp. 
<fpage>113</fpage>&#x2013;
<lpage>115</lpage>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-4">
<label>4</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>H.</given-names> 
<surname>Mudasir</surname></string-name> and <string-name>
<given-names>K.</given-names> 
<surname>Ahmad</surname></string-name>
</person-group>, &#x201C;
<article-title>Anthracnose disease of walnut&#x2014;A review</article-title>,&#x201D; 
<source>International Journal of Environment, Agriculture and Biotechnology</source>, vol. 
<volume>2</volume>, no. 
<issue>5</issue>, pp. 
<fpage>238908</fpage>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-5">
<label>5</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>P.</given-names> 
<surname>Pollegioni</surname></string-name>, <string-name>
<given-names>G. V.</given-names> 
<surname>Linden</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Belisario</surname></string-name>, <string-name>
<given-names>M.</given-names> 
<surname>Gras</surname></string-name>, <string-name>
<given-names>N.</given-names> 
<surname>Anselmi </surname></string-name> <etal>et al.</etal>
</person-group><italic>,</italic> &#x201C;
<article-title>Mechanisms governing the responses to anthracnose pathogen in Juglans spp</article-title>,&#x201D; 
<source>Journal of Biotechnology</source>, vol. 
<volume>159</volume>, no. 
<issue>4</issue>, pp. 
<fpage>251</fpage>&#x2013;
<lpage>264</lpage>, 
<year>2012</year>.</mixed-citation>
</ref>
<ref id="ref-6">
<label>6</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>Z.</given-names> 
<surname>Chuanlei</surname></string-name>, <string-name>
<given-names>Z.</given-names> 
<surname>Shanwen</surname></string-name>, <string-name>
<given-names>Y.</given-names> 
<surname>Jucheng</surname></string-name>, <string-name>
<given-names>S.</given-names> 
<surname>Yancui</surname></string-name> and <string-name>
<given-names>C.</given-names> 
<surname>Jia</surname></string-name>
</person-group>, &#x201C;
<article-title>Apple leaf disease identification using genetic algorithm and correlation-based feature selection method</article-title>,&#x201D; 
<source>International Journal of Agricultural and Biological Engineering</source>, vol. 
<volume>10</volume>, no. 
<issue>2</issue>, pp. 
<fpage>74</fpage>&#x2013;
<lpage>83</lpage>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-7">
<label>7</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>T. G.</given-names> 
<surname>Devi</surname></string-name> and <string-name>
<given-names>P.</given-names> 
<surname>Neelamegam</surname></string-name>
</person-group>, &#x201C;
<article-title>Image processing-based rice plant leaves diseases in Thanjavur, Tamilnadu</article-title>,&#x201D; 
<source> Cluster Computing</source>, vol. 
<volume>22</volume>, no. 
<issue>6</issue>, pp. 
<fpage>13415</fpage>&#x2013;
<lpage>13428</lpage>, 
<year>2019</year>.</mixed-citation>
</ref>
<ref id="ref-8">
<label>8</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>S.</given-names> 
<surname>Khalesi</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Mahmoudi</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Hosainpour</surname></string-name> and <string-name>
<given-names>A.</given-names> 
<surname>Alipour</surname></string-name>
</person-group>, &#x201C;
<article-title>Detection of walnut varieties using impact acoustics and artificial neural networks (ANNs)</article-title>,&#x201D; 
<source>Modern Applied Science</source>, vol. 
<volume>6</volume>, no. 
<issue>1</issue>, pp. 
<fpage>43</fpage>, 
<year>2012</year>.</mixed-citation>
</ref>
<ref id="ref-9">
<label>9</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>B.</given-names> 
<surname>Tigadi</surname></string-name> and <string-name>
<given-names>B.</given-names> 
<surname>Sharma</surname></string-name>
</person-group>, &#x201C;
<article-title>Banana plant disease detection and grading using image processing</article-title>,&#x201D; 
<source>International Journal of Engineering Science</source>, vol. 
<volume>6</volume>, no. 6, pp. 6512&#x2013;6516, 
<year>2016</year>.</mixed-citation>
</ref>
<ref id="ref-10">
<label>10</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>M.</given-names> 
<surname>Bhange</surname></string-name> and <string-name>
<given-names>H.</given-names> 
<surname>Hingoliwala</surname></string-name>
</person-group>, &#x201C;
<article-title>Smart farming: Pomegranate disease detection using image processing</article-title>,&#x201D; 
<source>Procedia Computer Science</source>, vol. 
<volume>58</volume>, pp. 
<fpage>280</fpage>&#x2013;
<lpage>288</lpage>, 
<year>2015</year>.</mixed-citation>
</ref>
<ref id="ref-11">
<label>11</label><mixed-citation publication-type="conf-proc">
<person-group person-group-type="author"><string-name>
<given-names>H.</given-names> 
<surname>Waghmare</surname></string-name>, <string-name>
<given-names>R.</given-names> 
<surname>Kokare</surname></string-name> and <string-name>
<given-names>Y.</given-names> 
<surname>Dandawate</surname></string-name>
</person-group>, &#x201C;
<article-title>Detection and classification of diseases of grape plant using opposite colour local binary pattern feature and machine learning for automated decision support system</article-title>,&#x201D; in <conf-name>Proc. 3rd Int. Conf. on Signal Processing and Integrated Networks (SPIN)</conf-name>, 
<publisher-loc>Noida</publisher-loc>: 
<publisher-name>Amity University</publisher-name>, pp. 
<fpage>513</fpage>&#x2013;
<lpage>518</lpage>, 
<year>2016</year>. </mixed-citation>
</ref>
<ref id="ref-12">
<label>12</label><mixed-citation publication-type="conf-proc">
<person-group person-group-type="author"><string-name>
<given-names>A.</given-names> 
<surname>Awate</surname></string-name>, <string-name>
<given-names>D.</given-names> 
<surname>Deshmankar</surname></string-name>, <string-name>
<given-names>G.</given-names> 
<surname>Amrutkar</surname></string-name>, <string-name>
<given-names>G.</given-names> 
<surname>Amrutkar</surname></string-name>, <string-name>
<given-names>U.</given-names> 
<surname>Bagul</surname></string-name> <etal>et al.</etal>
</person-group><italic>,</italic> &#x201C;
<article-title>Fruit disease detection using color, texture analysis and ANN</article-title>,&#x201D; in <conf-name>Proc. ICGCIoT</conf-name>, 
<publisher-loc>Greater Noida, Delhi, India</publisher-loc>, pp. 
<fpage>970</fpage>&#x2013;
<lpage>975</lpage>, 
<year>2015</year>. </mixed-citation>
</ref>
<ref id="ref-13">
<label>13</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>D. E.</given-names> 
<surname>Kusumandari</surname></string-name>, <string-name>
<given-names>M.</given-names> 
<surname>Adzkia</surname></string-name>, <string-name>
<given-names>S. P.</given-names> 
<surname>Gultom</surname></string-name>, <string-name>
<given-names>M.</given-names> 
<surname>Turnip</surname></string-name> and <string-name>
<given-names>A.</given-names> 
<surname>Turnip</surname></string-name>
</person-group>, &#x201C;
<article-title>Detection of strawberry plant disease based on leaf spot using color segmentation</article-title>,&#x201D; in 
<source>Proc. 2nd Int. Conf. on Mechanical, Electronics, Computer, and Industrial Technology</source>, vol. 
<volume>1230</volume>, no. 
<issue>1</issue>, pp. 
<fpage>012092</fpage>, 
<year>2019</year>.</mixed-citation>
</ref>
<ref id="ref-14">
<label>14</label><mixed-citation publication-type="conf-proc">
<person-group person-group-type="author"><string-name>
<given-names>I. S.</given-names> 
<surname>Areni</surname></string-name> and <string-name>
<given-names>R.</given-names> 
<surname>Tamin</surname></string-name>
</person-group>, &#x201C;
<article-title>Image processing system for early detection of cocoa fruit pest attack</article-title>,&#x201D; in <conf-name>Proc. 3rd Int. Conf. on Mathematics, Sciences, Technology, Education and Their Applications</conf-name>, vol. 1244, 
<publisher-loc>Sulawesi, Selatan, Indonesia</publisher-loc>, pp. 
<fpage>1</fpage>, 
<year>2019</year>. </mixed-citation>
</ref>
<ref id="ref-15">
<label>15</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>J.</given-names> 
<surname>Zhu</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Wu</surname></string-name>, <string-name>
<given-names>X.</given-names> 
<surname>Wang</surname></string-name> and <string-name>
<given-names>H.</given-names> 
<surname>Zhang</surname></string-name>
</person-group>, &#x201C;
<article-title>Identification of grape diseases using image analysis and BP neural networks</article-title>,&#x201D; 
<source>Multimedia Tools and Applications</source>, vol. 
<volume>79</volume>, pp. 
<fpage>14539</fpage>&#x2013;
<lpage>14551</lpage>, 
<year>2019</year>.</mixed-citation>
</ref>
<ref id="ref-16">
<label>16</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>Y.</given-names> 
<surname>Feng</surname></string-name>, <string-name>
<given-names>H.</given-names> 
<surname>Zhao</surname></string-name>, <string-name>
<given-names>X.</given-names> 
<surname>Li</surname></string-name>, <string-name>
<given-names>X.</given-names> 
<surname>Zhang</surname></string-name> and <string-name>
<given-names>H.</given-names> 
<surname>Li</surname></string-name>
</person-group>, &#x201C;
<article-title>A multi-scale 3D Otsu thresholding algorithm for medical image segmentation</article-title>,&#x201D; 
<source>Digital Signal Processing</source>, vol. 
<volume>60</volume>, pp. 
<fpage>186</fpage>&#x2013;
<lpage>199</lpage>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-17">
<label>17</label><mixed-citation publication-type="conf-proc">
<person-group person-group-type="author"><string-name>
<given-names>D.</given-names> 
<surname>Liu</surname></string-name> and <string-name>
<given-names>J.</given-names> 
<surname>Yu</surname></string-name>
</person-group>, &#x201C;
<article-title>Otsu method and k-means</article-title>,&#x201D; in <conf-name>Proc. of the 2009 Ninth Int. Conf. on Hybrid Intelligent Systems</conf-name>, 
<publisher-loc>Washington, DC, United States</publisher-loc>, 
<volume>1</volume>, pp. 
<fpage>344</fpage>&#x2013;
<lpage>349</lpage>, 
<year>2009</year>. </mixed-citation>
</ref>
<ref id="ref-18">
<label>18</label><mixed-citation publication-type="book">
<person-group person-group-type="author"><string-name>
<given-names>J.</given-names> 
<surname>Yousefi</surname></string-name>
</person-group>, 
<source>Image binarization using Otsu thresholding algorithm</source>. 
<publisher-loc>Ontario, Canada</publisher-loc>: 
<publisher-name>University of Guelph</publisher-name>, 
<year>2011</year>.</mixed-citation>
</ref>
<ref id="ref-19">
<label>19</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>P. S.</given-names> 
<surname>Suhasini</surname></string-name>, <string-name>
<given-names>K. S.</given-names> 
<surname>Krishna</surname></string-name> and <string-name>
<given-names>I. M.</given-names> 
<surname>Krishna</surname></string-name>
</person-group>, &#x201C;
<article-title>Content based image retrieval based on different global and local color histogram methods: A survey</article-title>,&#x201D; 
<source>Journal of the Institution of Engineers (India): Series B</source>, vol. 
<volume>98</volume>, no. 
<issue>1</issue>, pp. 
<fpage>129</fpage>&#x2013;
<lpage>135</lpage>, 
<year>2017</year>.</mixed-citation>
</ref>
<ref id="ref-20">
<label>20</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>S. M.</given-names> 
<surname>Singh</surname></string-name> and <string-name>
<given-names>K.</given-names> 
<surname>Hemachandran</surname></string-name>
</person-group>, &#x201C;
<article-title>Image retrieval based on the combination of color histogram and color moment</article-title>,&#x201D; 
<source>International Journal of Computer Applications</source>, vol. 
<volume>58</volume>, no. 
<issue>3</issue>, pp. 27&#x2013;34, 
<year>2012</year>.</mixed-citation>
</ref>
<ref id="ref-21">
<label>21</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>P. S.</given-names> 
<surname>Kumar</surname></string-name> and <string-name>
<given-names>V.</given-names> 
<surname>Dharun</surname></string-name>
</person-group>, &#x201C;
<article-title>Extraction of texture features using GLCM and shape features using connected regions</article-title>,&#x201D; 
<source>International Journal of Engineering and Technology</source>, vol. 
<volume>8</volume>, no. 
<issue>6</issue>, pp. 
<fpage>2926</fpage>&#x2013;
<lpage>2930</lpage>, 
<year>2016</year>.</mixed-citation>
</ref>
<ref id="ref-22">
<label>22</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>J.</given-names> 
<surname>Zhu</surname></string-name>, <string-name>
<given-names>W.</given-names> 
<surname>Ang</surname></string-name>, <string-name>
<given-names>W.</given-names> 
<surname>Xiushan</surname></string-name> and <string-name>
<given-names>Z.</given-names> 
<surname>Hao</surname></string-name>
</person-group>, &#x201C;
<article-title>Identification of grape diseases using image analysis and BP neural networks</article-title>,&#x201D; 
<source>Multimedia Tools and Applications</source>, vol. 
<volume>79</volume>, no. 
<issue>21</issue>, pp. 
<fpage>14539</fpage>&#x2013;
<lpage>14551</lpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-23">
<label>23</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>B.</given-names> 
<surname>Prakash</surname></string-name> and <string-name>
<given-names>A.</given-names> 
<surname>Yerpude</surname></string-name>
</person-group>, &#x201C;
<article-title>Identification of mango leaf disease and control prediction using image processing and neural network</article-title>,&#x201D; 
<source>International Journal for Scientific Research &#x0026; Development</source>, vol. 
<volume>3</volume>, no. 
<issue>5</issue>, pp. 794&#x2013;799, 
<year>2015</year>.</mixed-citation>
</ref>
<ref id="ref-24">
<label>24</label><mixed-citation publication-type="conf-proc">
<person-group person-group-type="author"><string-name>
<given-names>S.</given-names> 
<surname>Suthaharan</surname></string-name>
</person-group>, &#x201C;
<article-title>Support vector machine</article-title>,&#x201D; in <conf-name>Machine Learning Models and Algorithms for Big Data Classification</conf-name>, 
<publisher-loc>Boston, MA</publisher-loc>: 
<publisher-name>Springer</publisher-name>, pp. 
<fpage>207</fpage>&#x2013;
<lpage>235</lpage>, 
<year>2016</year>. </mixed-citation>
</ref>
<ref id="ref-25">
<label>25</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>M. H.</given-names> 
<surname>Saleem</surname></string-name>, <string-name>
<given-names>S.</given-names> 
<surname>Khanchi</surname></string-name>, <string-name>
<given-names>J.</given-names> 
<surname>Potgieter</surname></string-name> and <string-name>
<given-names>K. M.</given-names> 
<surname>Arif</surname></string-name>
</person-group>, &#x201C;
<article-title>Image-based plant disease identification by deep learning meta-architectures</article-title>,&#x201D; 
<source>Plants</source>, vol. 
<volume>9</volume>, no. 
<issue>11</issue>, pp. 
<fpage>1451</fpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-26">
<label>26</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>K. P.</given-names> 
<surname>Ferentinos</surname></string-name>
</person-group>, &#x201C;
<article-title>Deep learning models for plant disease detection and diagnosis</article-title>,&#x201D; 
<source>Computers and Electronics in Agriculture</source>, vol. 
<volume>145</volume>, pp. 
<fpage>311</fpage>&#x2013;
<lpage>318</lpage>, 
<year>2018</year>.</mixed-citation>
</ref>
<ref id="ref-27">
<label>27</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>S. B.</given-names> 
<surname>Jadhav</surname></string-name>, <string-name>
<given-names>V. R.</given-names> 
<surname>Udupi</surname></string-name> and <string-name>
<given-names>S. B.</given-names> 
<surname>Patil</surname></string-name>
</person-group>, &#x201C;
<article-title>Identification of plant diseases using convolutional neural networks</article-title>,&#x201D; 
<source>International Journal of Information Technology</source>, vol. 2020, no. 1, pp. 
<fpage>1</fpage>&#x2013;
<lpage>10</lpage>, 
<year>2020</year>.</mixed-citation>
</ref>
<ref id="ref-28">
<label>28</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>S.</given-names> 
<surname>Sladojevic</surname></string-name>, <string-name>
<given-names>M.</given-names> 
<surname>Arsenovic</surname></string-name>, <string-name>
<given-names>A.</given-names> 
<surname>Anderla</surname></string-name>, <string-name>
<given-names>D.</given-names> 
<surname>Culibrk</surname></string-name> and <string-name>
<given-names>D.</given-names> 
<surname>Stefanovic</surname></string-name>
</person-group>, &#x201C;
<article-title>Deep neural networks based recognition of plant diseases by leaf image classification</article-title>,&#x201D; 
<source>Computational Intelligence and Neuroscience</source>, vol. 2016, no. 1, pp. 1&#x2013;11, 
<year>2016</year>.</mixed-citation>
</ref>
<ref id="ref-29">
<label>29</label><mixed-citation publication-type="journal">
<person-group person-group-type="author"><string-name>
<given-names>K.</given-names> 
<surname>Subhadra</surname></string-name> and <string-name>
<given-names>N.</given-names> 
<surname>Kavitha</surname></string-name>
</person-group>, &#x201C;
<article-title>A hybrid leaf disease detection scheme using gray co-occurance matrix support vector machine algorithm</article-title>,&#x201D; 
<source>International Journal of Recent Technology and Engineering (IJRTE)</source>, vol. 
<volume>8</volume>, no. 
<issue>2S11</issue>, pp. 
<fpage>2277</fpage>&#x2013;
<lpage>3878</lpage>, 
<year>2019</year>.</mixed-citation>
</ref>
</ref-list>
</back>
</article>