<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1 20151215//EN" "http://jats.nlm.nih.gov/publishing/1.1/JATS-journalpublishing1.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="1.1">
<front>
<journal-meta>
<journal-id journal-id-type="pmc">IASC</journal-id>
<journal-id journal-id-type="nlm-ta">IASC</journal-id>
<journal-id journal-id-type="publisher-id">IASC</journal-id>
<journal-title-group>
<journal-title>Intelligent Automation &#x0026; Soft Computing</journal-title>
</journal-title-group>
<issn pub-type="epub">2326-005X</issn>
<issn pub-type="ppub">1079-8587</issn>
<publisher>
<publisher-name>Tech Science Press</publisher-name>
<publisher-loc>USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">26051</article-id>
<article-id pub-id-type="doi">10.32604/iasc.2023.026051</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Article</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Student&#x2019;s Health Exercise Recognition Tool for E-Learning Education</article-title><alt-title alt-title-type="left-running-head">Student&#x2019;s Health Exercise Recognition Tool for E-Learning Education</alt-title><alt-title alt-title-type="right-running-head">Student&#x2019;s Health Exercise Recognition Tool for E-Learning Education</alt-title>
</title-group>
<contrib-group content-type="authors">
<contrib id="author-1" contrib-type="author">
<name name-style="western"><surname>Shloul</surname><given-names>Tamara al</given-names></name>
<xref ref-type="aff" rid="aff-1">1</xref>
</contrib>
<contrib id="author-2" contrib-type="author">
<name name-style="western"><surname>Javeed</surname><given-names>Madiha</given-names></name>
<xref ref-type="aff" rid="aff-2">2</xref>
</contrib>
<contrib id="author-3" contrib-type="author">
<name name-style="western"><surname>Gochoo</surname><given-names>Munkhjargal</given-names></name>
<xref ref-type="aff" rid="aff-3">3</xref>
</contrib>
<contrib id="author-4" contrib-type="author">
<name name-style="western"><surname>Alsuhibany</surname><given-names>Suliman A.</given-names></name>
<xref ref-type="aff" rid="aff-4">4</xref>
</contrib>
<contrib id="author-5" contrib-type="author">
<name name-style="western"><surname>Ghadi</surname><given-names>Yazeed Yasin</given-names></name>
<xref ref-type="aff" rid="aff-5">5</xref>
</contrib>
<contrib id="author-6" contrib-type="author">
<name name-style="western"><surname>Jalal</surname><given-names>Ahmad</given-names></name>
<xref ref-type="aff" rid="aff-2">2</xref>
</contrib>
<contrib id="author-7" contrib-type="author" corresp="yes">
<name name-style="western"><surname>Park</surname><given-names>Jeongmin</given-names></name>
<xref ref-type="aff" rid="aff-6">6</xref><email>jmpark@kpu.ac.kr</email>
</contrib>
<aff id="aff-1"><label>1</label><institution>Department of Humanities and Social Science, Al Ain University</institution>, <addr-line>Al Ain, 15551</addr-line>, <country>UAE</country></aff>
<aff id="aff-2"><label>2</label><institution>Department of Computer Science, Air University</institution>, <addr-line>Islamabad, 44000</addr-line>, <country>Pakistan</country></aff>
<aff id="aff-3"><label>3</label><institution>Department of Computer Science and Software Engineering, United Arab Emirates University</institution>, <addr-line>Al Ain, 15551</addr-line>, <country>UAE</country></aff>
<aff id="aff-4"><label>4</label><institution>Department of Computer Science, College of Computer, Qassim University</institution>, <addr-line>Buraydah, 51452</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff-5"><label>5</label><institution>Department of Computer Science and Software Engineering, Al Ain University</institution>, <addr-line>Al Ain, 15551</addr-line>, <country>UAE</country></aff>
<aff id="aff-6"><label>6</label><institution>Department of Computer Engineering, Korea Polytechnic University</institution>, <addr-line>Siheung-si, Gyeonggi-do, 237</addr-line>, <country>Korea</country></aff>
</contrib-group><author-notes><corresp id="cor1"><label>&#x002A;</label>Corresponding Author: Jeongmin Park. Email: <email>jmpark@kpu.ac.kr</email></corresp></author-notes>
<pub-date pub-type="epub" date-type="pub" iso-8601-date="2022-05-30"><day>30</day>
<month>05</month>
<year>2022</year></pub-date>
<volume>35</volume>
<issue>1</issue>
<fpage>149</fpage>
<lpage>161</lpage>
<history>
<date date-type="received"><day>14</day><month>12</month><year>2021</year></date>
<date date-type="accepted"><day>27</day><month>1</month><year>2022</year></date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2023 Shloul et al.</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Shloul et al.</copyright-holder>
<license xlink:href="https://creativecommons.org/licenses/by/4.0/">
<license-p>This work is licensed under a <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="TSP_IASC_26051.pdf"></self-uri>
<abstract>
<p>Owing to the recently increased requirements of e-learning systems, multiple educational institutes, such as kindergartens, have shifted their teaching toward virtual education. Automated student health exercise recognition is a difficult but important task, given the physical education needs of young learners in particular. The proposed system focuses on the necessary implementation of student health exercise recognition (SHER) using a modified Quaternion-based filter for inertial data refinement and data fusion as the pre-processing steps. The cleansed data has then been segmented using an overlapping windowing approach, followed by pattern identification in the form of static and kinematic signal patterns. These patterns have been utilized to extract cues for both patterned signals, which are further optimized using Fisher&#x2019;s linear discriminant analysis (FLDA). Finally, the physical exercise activities have been categorized using extended Kalman filter (EKF)-based neural networks. This system can be implemented in multiple educational establishments, including intelligent training systems, virtual mentors, smart simulations, and interactive learning management methods.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>E-learning</kwd>
<kwd>exercise recognition</kwd>
<kwd>online physical education</kwd>
<kwd>student&#x2019;s healthcare</kwd>
</kwd-group>
</article-meta>
</front>
<body>
<sec id="s1">
<label>1</label>
<title>Introduction</title>
<p>E-learning has become a necessary part of daily life in this digital era, where information flows from minds and books to virtually everywhere through online systems. Automated systems have advanced considerably over the last two decades compared with offline learning methods. Given the demands of online education, physical education has also become an important aspect of young learners&#x2019; health training. Machine learning is altering the e-learning domain by providing adaptive algorithms and techniques for robust learning outcomes [<xref ref-type="bibr" rid="ref-1">1</xref>]. SHER is part of the e-learning domain and has become a challenging task for researchers to investigate [<xref ref-type="bibr" rid="ref-2">2</xref>]. SHER covers different physical actions including daily routine work, walking, jogging, exercising, etc.</p>
<p>This research article focuses on physical exercises conducted online for young students, which involve several complicating factors: similar exercise postures can produce analogous signal peaks, low-quality systems give poor results, and precision is often not up to the mark [<xref ref-type="bibr" rid="ref-3">3</xref>]. This research idea has therefore gained popularity and found valuable use in many practical applications such as e-learning [<xref ref-type="bibr" rid="ref-4">4</xref>], human activity recognition [<xref ref-type="bibr" rid="ref-5">5</xref>], pattern recognition [<xref ref-type="bibr" rid="ref-6">6</xref>], smart physical education [<xref ref-type="bibr" rid="ref-7">7</xref>], and intelligent mentors [<xref ref-type="bibr" rid="ref-8">8</xref>]. In the online physical education (OPE) domain, a few researchers have contributed work based on a variety of features and machine learning techniques. In this article, a unique method has been proposed for SHER using a modified Quaternion-based filter for raw data cleansing and EKF-based neural networks for classification. The modified Quaternion-based filter has been designed for cleansing raw inertial measurement unit (IMU) data. Further, a robust system has been designed for static and kinematic pattern identification and cues extraction based on two different domains. Moreover, the FLDA technique has been utilized for cues optimization, and an EKF-based neural network approach has been proposed for classifying online physical activities. A publicly available dataset, enabling real-time mistake detection &#x0026; corrective feedback for free-weights exercises (ERICA) [<xref ref-type="bibr" rid="ref-9">9</xref>], has been utilized as a benchmark, and the proposed system attained significant improvement in SHER over other state-of-the-art methods.</p>
<p>The article is organized as follows. Section 2 briefly describes related work on the topic at hand. Section 3 specifies the architecture flow of the proposed SHER system along with the details of each phase. Section 4 presents the performance assessment of the process proposed in the previous section. Section 5 contributes a brief discussion of the proposed health exercise tool and its shortcomings. Lastly, Section 6 concludes the subject matter and provides a couple of future directions for an improved system.</p>
</sec>
<sec id="s2">
<label>2</label>
<title>Literature Review</title>
<p>In this research, the SHER tool has been proposed over an exercise dataset. A large number of studies have already been carried out to propose methodologies for OPE improvement. A prime concern is detecting the exercise type, intensity, and associated parameters. This section discusses the drawbacks of traditional systems and the benefits of well-designed systems.</p>
<sec id="s2_1">
<label>2.1</label>
<title>OPE via Traditional Systems</title>
<p>Typically, researchers have proposed placing sensors at multiple parts of the human body, extracting distinguishing cues, and training some machine learning algorithm over those cues. For instance, the authors in [<xref ref-type="bibr" rid="ref-10">10</xref>] proposed such a model to acquire data from physical activities, pre-process it, and extract features. Then, they used feature selection and a genetic algorithm to classify the physical activities. Reference [<xref ref-type="bibr" rid="ref-11">11</xref>] describes a similar method to obtain raw sensor signals from human motion and the heart. Next, multiple filters and sliding windows were applied to segment the filtered data. Then, multiple features were extracted and feature-to-feature fusion was applied to combine them. Multi-layer sequential forward selection was proposed for further dimensionality reduction, followed by a Gaussian mixture model and Gaussian mixture regression to extract the keywords. These keywords were then fed to deep belief networks using restricted Boltzmann machines in order to classify physical activities. Another similar conventional model was proposed in [<xref ref-type="bibr" rid="ref-12">12</xref>] by Khan et al. It extracted acceleration signals and applied noise reduction. Then, spectral entropy was calculated along with linear discriminant analysis, and neural networks were utilized for classification. However, these customary techniques failed to provide the desired performance, which the proposed SHER system addresses.</p>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>OPE via Well-Designed Systems</title>
<p>There are also a few well-structured methods in the literature that achieved better OPE activity recognition than the conventional methods mentioned above. Morillo et al. in [<xref ref-type="bibr" rid="ref-13">13</xref>] described a physical activity recognition method using smartphones, where an accelerometer provided the data over temporal windows and noise was removed via filters. Next, they extracted features from the time, Fourier transform, and frequency domains. Then, they utilized the Ameva algorithm on each variable to obtain intervals for each feature and its associated activity. Further, their system retrieved the relative probabilities of each activity to associate it with each interval&#x2019;s class matrix. The system was sophisticated enough to attain higher accuracies. Another well-designed model was proposed in [<xref ref-type="bibr" rid="ref-14">14</xref>]. It included data acquisition, framing, and pre-processing stages. Next, a mixture of features was extracted from the filtered data and combined, followed by a series of feature selection and pre-classification steps via grey wolf optimization. Finally, decision trees, genetic algorithms, and support vector machines were utilized for classification. This distinctive process helped accomplish good results. It can therefore be observed that a well-designed methodology is important for achieving good OPE results. Although these systems provided good results, they fell short in a few respects, which is the focus of this research.</p>
</sec>
</sec>
<sec id="s3">
<label>3</label>
<title>Material and Methods</title>
<p>The proposed system comprises filtering, sliding-window segmentation [<xref ref-type="bibr" rid="ref-15">15</xref>], and pattern identification as the initial phases of the pre-processing stage. After the patterns have been identified as static or kinematic, the cues for both patterns are computed using cepstral coefficients and time-frequency domain methods. Next, FLDA [<xref ref-type="bibr" rid="ref-16">16</xref>] is applied to symbolize the optimized cues from both patterned data streams together. Lastly, an EKF using neural networks [<xref ref-type="bibr" rid="ref-17">17</xref>] is applied to classify the OPE exercises. An overview of the proposed system is presented in <xref ref-type="fig" rid="fig-1">Fig. 1</xref> via multiple stages.</p>
<fig id="fig-1">
<label>Figure 1</label>
<caption>
<title>The architecture flow diagram of the proposed SHER for OPE</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_26051-fig-1.png"/>
</fig>
<sec id="s3_1">
<label>3.1</label>
<title>Pre-Processing Students&#x2019; Exercise Data</title>
<p>The proposed model consists of three phases to pre-process the raw data. A modified Quaternion-based IMU filter has been proposed for cleansing the raw data. The filtered data has then been segmented and its patterns identified, as described below.</p>
<sec id="s3_1_1">
<label>3.1.1</label>
<title>Modified Quaternion-Based IMU Filter</title>
<p>This study proposes a modified Quaternion-based filter for IMU data. Acceleration and gyroscope data have been taken from the ERICA dataset, and a calibration stage has been proposed to remove missing values and bias from the signals [<xref ref-type="bibr" rid="ref-18">18</xref>]. Then, an error correction phase has been introduced to remove errors from the signals. Next, the data has been normalized using gradient descent and Quaternion techniques. Finally, the IMU sensors&#x2019; signals have been fused to obtain the normalized data. <xref ref-type="table" rid="table-1">Tab. 1</xref> elucidates the Quaternion-based IMU filter algorithm (QIFA) in detail, and a code sketch follows the table.</p>
<table-wrap id="table-1"><label>Table 1</label>
<caption>
<title>Quaternion-based IMU filter algorithm</title></caption>
<table><colgroup><col align="left"/>
</colgroup>
<tbody>
<tr>
<td align="left"><bold>Input:</bold> ACC &#x003D; acceleration data (x, y, z)<break/><bold>Input</bold>: GYRO &#x003D; gyroscope data (x, y, z).<break/><bold>Output</bold>: QIFA_signal &#x003D; filtered IMU signals<break/><bold>S</bold> &#x003D; gravitationalError(ACC)<break/><bold>C</bold> &#x003D; gyroDrift(GYRO)<break/><bold>Repeat</bold><break/>&#x2002;&#x2002;&#x2002;&#x2002;<bold>ALF</bold> &#x003D; butterworth_lowpass(ACC, GYRO, S, C)<break/>&#x2002;&#x2002;&#x2002;&#x2002;<bold>AHF</bold> &#x003D; butterworth_highpass(ACC, GYRO, S, C)<break/>&#x2002;&#x2002;&#x2002;&#x2002;<bold>EALF</bold> &#x003D; error_correction(ALF);<break/>&#x2002;&#x2002;&#x2002;&#x2002;<bold>EAHF</bold> &#x003D; error_correction(AHF);<break/>&#x2002;&#x2002;&#x2002;&#x2002;<bold>ANF</bold> &#x003D; normalize(ALF, AHF)<break/>&#x2002;&#x2002;&#x2002;&#x2002;<bold>QIFA_signal</bold> &#x003D; fuse(gradientDescent(Quaternion(ANF)))<break/><bold>Until</bold> all samples exhausted&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;&#x2003;</td>
</tr>
</tbody>
</table>
</table-wrap>
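<p>For illustration only, the following is a minimal Python reading of the loop in <xref ref-type="table" rid="table-1">Tab. 1</xref>. The filter order, cut-off frequencies, bias-removal steps, and fusion weights are assumptions of ours, not values specified by QIFA; SciPy supplies the Butterworth stages.</p>
<preformat># Illustrative sketch of the QIFA loop (Tab. 1); not the authors' implementation.
import numpy as np
from scipy.signal import butter, filtfilt

def butterworth(signal, cutoff_hz, fs_hz, kind):
    # 4th-order zero-phase Butterworth; order and cut-offs are assumptions.
    b, a = butter(4, cutoff_hz / (fs_hz / 2), btype=kind)
    return filtfilt(b, a, signal, axis=0)

def qifa(acc, gyro, fs_hz=50.0):
    """acc, gyro: (N, 3) arrays of raw IMU samples."""
    acc = acc - acc.mean(axis=0)        # stand-in for gravitationalError(ACC)
    gyro = gyro - gyro.mean(axis=0)     # stand-in for gyroDrift(GYRO)
    alf = butterworth(acc, 5.0, fs_hz, "lowpass")    # ALF
    ahf = butterworth(acc, 0.3, fs_hz, "highpass")   # AHF
    anf = alf + ahf
    anf = anf / np.linalg.norm(anf, axis=1, keepdims=True)  # ANF
    # A quaternion/gradient-descent orientation update would refine anf here;
    # a simple complementary fusion stands in for fuse(...).
    return 0.98 * anf + 0.02 * gyro</preformat>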
</sec>
<sec id="s3_1_2">
<label>3.1.2</label>
<title>Sliding Windows</title>
<p>To further pre-process the fused data, this article proposes sliding overlapping windows [<xref ref-type="bibr" rid="ref-19">19</xref>] over the filtered data. Overlapping windows ensure that no data is missed. They divide the data into windows of five seconds each; a sketch follows <xref ref-type="fig" rid="fig-2">Fig. 2</xref>. <xref ref-type="fig" rid="fig-2">Fig. 2</xref> presents the sliding overlapping windows for the IMU fused signal.</p>
<fig id="fig-2">
<label>Figure 2</label>
<caption>
<title>Sliding overlapping windows for IMU fused data</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_26051-fig-2.png"/>
</fig>
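<p>A minimal sketch of the segmentation, assuming a 50&#x2009;Hz sampling rate and 50&#x0025; overlap (only the five-second window length is fixed by the text; the rest are assumptions):</p>
<preformat>import numpy as np

def sliding_windows(signal, fs_hz=50, win_s=5, overlap=0.5):
    """Split an (N, C) signal into overlapping windows of win_s seconds."""
    size = int(win_s * fs_hz)
    step = int(size * (1 - overlap))
    return np.stack([signal[i:i + size]
                     for i in range(0, len(signal) - size + 1, step)])</preformat>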
</sec>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Patterns Identification</title>
<p>The five-second segmented IMU data has further been used to derive two types of patterns in order to pre-classify the OPE activities of young learners. The two kinds of patterns are based on the activity signal strength, i.e., static patterns and kinematic patterns. The Pearson correlation coefficient has been utilized to determine the relationship between two variables [<xref ref-type="bibr" rid="ref-20">20</xref>] as;<disp-formula id="eqn-1"><label>(1)</label>
<mml:math id="mml-eqn-1" display="block"><mml:mrow><mml:mi mathvariant="bold-italic">c</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>e</mml:mi><mml:mi>f</mml:mi><mml:mi>f</mml:mi></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /></mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mrow><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mo>&#x2061;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi mathvariant="bold-italic">a</mml:mi></mml:mrow><mml:mrow><mml:mi mathvariant="bold">i</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mrow><mml:mover><mml:mi mathvariant="bold-italic">a</mml:mi><mml:mo stretchy="false">&#x00AF;</mml:mo></mml:mover></mml:mrow></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi mathvariant="bold-italic">b</mml:mi></mml:mrow><mml:mrow><mml:mi mathvariant="bold">i</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mrow><mml:mover><mml:mi mathvariant="bold-italic">b</mml:mi><mml:mo stretchy="false">&#x00AF;</mml:mo></mml:mover></mml:mrow></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:msqrt><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mo>&#x2061;</mml:mo><mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mrow></mml:mrow><mml:msub><mml:mrow><mml:mi mathvariant="bold-italic">a</mml:mi></mml:mrow><mml:mrow><mml:mi mathvariant="bold">i</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mrow><mml:mover><mml:mi mathvariant="bold-italic">a</mml:mi><mml:mo stretchy="false">&#x00AF;</mml:mo></mml:mover></mml:mrow></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:msup><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mo>&#x2061;</mml:mo><mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi mathvariant="bold-italic">b</mml:mi></mml:mrow><mml:mrow><mml:mi mathvariant="bold">i</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mrow><mml:mover><mml:mi mathvariant="bold-italic">b</mml:mi><mml:mo stretchy="false">&#x00AF;</mml:mo></mml:mover></mml:mrow></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:msqrt></mml:mrow></mml:mfrac></mml:mrow></mml:mstyle></mml:math>
</disp-formula>where <italic>a<sub>i</sub></italic> are the values of the first variable in a window, <inline-formula id="ieqn-1">
<mml:math id="mml-ieqn-1"><mml:mrow><mml:mover><mml:mi>a</mml:mi><mml:mo stretchy="false">&#x00AF;</mml:mo></mml:mover></mml:mrow></mml:math>
</inline-formula> is the mean of values in the first variable, <italic>b</italic><sub><italic>i</italic></sub> are the values of the second variable in a window, and <inline-formula id="ieqn-2">
<mml:math id="mml-ieqn-2"><mml:mrow><mml:mover><mml:mi>b</mml:mi><mml:mo stretchy="false">&#x00AF;</mml:mo></mml:mover></mml:mrow></mml:math>
</inline-formula> is the mean of the values in the second variable. <xref ref-type="fig" rid="fig-3">Fig. 3</xref> shows the Pearson correlation coefficients calculated separately for each window&#x2019;s samples. Then, a threshold of 0.1 (shown as the red dotted line in <xref ref-type="fig" rid="fig-3">Fig. 3</xref>) has been applied to separate the static and kinematic patterns, as in the sketch following <xref ref-type="fig" rid="fig-3">Fig. 3</xref>.</p>
<fig id="fig-3">
<label>Figure 3</label>
<caption>
<title>Static and kinematic patterns based on Pearson correlation coefficients (threshold&#x2009;&#x003D;&#x2009;0.1)</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_26051-fig-3.png"/>
</fig>
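<p>Eq. (1) with the 0.1 threshold can be applied per window as in this sketch; NumPy&#x2019;s <monospace>corrcoef</monospace> computes the same coefficient as Eq. (1):</p>
<preformat>import numpy as np

def label_pattern(a, b, threshold=0.1):
    """Label one window via the Pearson coefficient of Eq. (1)."""
    r = np.corrcoef(a, b)[0, 1]   # corcoeff of Eq. (1)
    return "kinematic" if r > threshold else "static"</preformat>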
<sec id="s3_2_1">
<label>3.2.1</label>
<title>Static Patterns</title>
<p>Static patterned activities are those actions whose Pearson correlation coefficient falls below a defined level such as 0.1. These are physical actions performed by learners that show few variations in the motion signals, such as IMU data from exercises done while sitting or lying on the floor.</p>
</sec>
<sec id="s3_2_2">
<label>3.2.2</label>
<title>Kinematic Patterns</title>
<p>An activity is considered kinematic patterned if the window&#x2019;s Pearson correlation coefficient is above a certain threshold, i.e., 0.1. Kinematic patterned activities are generally those physical actions whose signals show more variation due to movement diversity, such as running, jogging, jumping, etc.</p>
</sec>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Cues Extraction</title>
<p>Multi-domain cues have been extracted from the patterned activity signals. For kinematic patterned signals, the Hilbert-Huang transform (HHT) and Teager energy cepstral coefficients (TECCs) have been extracted, whereas for static patterned signals, linear prediction cepstral coefficients (LPCCs) and the synchro-squeezing transform (SST) have been extracted.</p>
<sec id="s3_3_1">
<label>3.3.1</label>
<title>Hilbert-Huang Transform</title>
<p>HHT is a recent method for IMU signal analysis that decomposes data into multiple intrinsic mode functions (IMFs) using the empirical mode decomposition (EMD) method. It also represents the signals as an energy distribution in the time-frequency domain known as the Hilbert spectrum. HHT is particularly well suited to non-stationary and non-linear data [<xref ref-type="bibr" rid="ref-21">21</xref>]. The raw data <italic>D(t)</italic> can be expressed as;<disp-formula id="eqn-2"><label>(2)</label>
<mml:math id="mml-eqn-2" display="block"><mml:mrow><mml:mi mathvariant="normal">D</mml:mi></mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi mathvariant="normal">t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>m</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:msub><mml:mi>f</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:msub><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:msub></mml:math>
</disp-formula></p>
<p>where, <italic>f<sub>i</sub></italic> represents the <italic>i</italic>th IMF and <italic>r<sub>m</sub></italic> is the residue. Furthermore, HHT can measure changes in signals through instantaneous frequencies, which helps in calculating the cues of the OPE actions [<xref ref-type="bibr" rid="ref-22">22</xref>]. <xref ref-type="fig" rid="fig-4">Fig. 4</xref> shows the movement variations calculated through HHT, and a code sketch follows the figure.</p>
<fig id="fig-4">
<label>Figure 4</label>
<caption>
<title>HHT movement variations for a) Lateral raises and b) Biceps curls</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_26051-fig-4.png"/>
</fig>
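<p>As a hedged illustration, the decomposition of Eq. (2) followed by the Hilbert transform can be computed with the third-party PyEMD package and SciPy; the tooling choice is ours, not the paper&#x2019;s:</p>
<preformat># Sketch of HHT for one signal window; PyEMD/SciPy are our tooling choices.
import numpy as np
from PyEMD import EMD            # pip install EMD-signal
from scipy.signal import hilbert

def hht(window, fs_hz=50.0):
    imfs = EMD()(window)         # intrinsic mode functions f_i of Eq. (2)
    analytic = hilbert(imfs, axis=1)
    inst_freq = np.diff(np.unwrap(np.angle(analytic)), axis=1) * fs_hz / (2 * np.pi)
    return imfs, inst_freq, np.abs(analytic)   # Hilbert spectrum components</preformat>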
</sec>
<sec id="s3_3_2">
<label>3.3.2</label>
<title>Teager Energy Cepstral Coefficients</title>
<p>The Teager energy operator (TEO) is quite efficient at detecting disturbances in signals by calculating the signal&#x2019;s energy [<xref ref-type="bibr" rid="ref-23">23</xref>]. It estimates the true total energy of the signal, which is then used to extract TECCs. The TEO is defined as;<disp-formula id="eqn-3"><label>(3)</label>
<mml:math id="mml-eqn-3" display="block"><mml:mi>E</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:msup><mml:mi>x</mml:mi><mml:mn>2</mml:mn></mml:msup><mml:mo stretchy="false">[</mml:mo><mml:mi>n</mml:mi><mml:mo stretchy="false">]</mml:mo><mml:mo>&#x2212;</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">[</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo stretchy="false">]</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">[</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo stretchy="false">]</mml:mo></mml:math>
</disp-formula>where, <italic>x</italic> is the signal within a sample window and <italic>n</italic> is the sample index. TECCs are similar to mel-frequency cepstral coefficients [<xref ref-type="bibr" rid="ref-24">24</xref>] and consist of pre-processing, Gabor filterbank, TEO, framing, averaging, log, and discrete cosine transform stages, along with cepstral mean subtraction [<xref ref-type="bibr" rid="ref-25">25</xref>]. <xref ref-type="fig" rid="fig-5">Fig. 5</xref> displays the effect of the TEO on a patterned signal window; a sketch of Eq. (3) follows the figure.</p>
<fig id="fig-5">
<label>Figure 5</label>
<caption>
<title>Effect of TEO over a patterned signal window</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_26051-fig-5.png"/>
</fig>
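<p>Eq. (3) translates directly into a few lines of code; a minimal sketch (the endpoint padding is our choice):</p>
<preformat>import numpy as np

def teager_energy(x):
    """Teager energy operator of Eq. (3): E[n] = x[n]^2 - x[n-1]*x[n+1]."""
    e = np.empty_like(x)
    e[1:-1] = x[1:-1] ** 2 - x[:-2] * x[2:]
    e[0], e[-1] = e[1], e[-2]   # endpoint padding (our choice)
    return e</preformat>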
</sec>
<sec id="s3_3_3">
<label>3.3.3</label>
<title>Linear Prediction Cepstral Coefficients</title>
<p>Linear prediction cepstral coefficients have been utilized to capture complex action patterns from the signal and its transfer function. The cepstrum shows the rate of change across a variety of bands. The LPCCs <italic>c<sub>m</sub></italic> have been derived from the linear prediction coefficients <italic>a<sub>m</sub></italic> using the recursive relationship between the predictor coefficients and the cepstral coefficients as;</p>
<p><disp-formula id="eqn-4"><label>(4)</label>
<mml:math id="mml-eqn-4" display="block"><mml:msub><mml:mi>C</mml:mi><mml:mi>o</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext><mml:mi mathvariant="normal">l</mml:mi><mml:mi mathvariant="normal">n</mml:mi></mml:mrow><mml:mo>.</mml:mo><mml:msup><mml:mi>&#x03C3;</mml:mi><mml:mn>2</mml:mn></mml:msup></mml:math>
</disp-formula></p>
<p><disp-formula id="eqn-5"><label>(5)</label>
<mml:math id="mml-eqn-5" display="block"><mml:msub><mml:mi>c</mml:mi><mml:mi>m</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mi>a</mml:mi><mml:mi>m</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:mrow><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext></mml:mrow><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>n</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>m</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mi>n</mml:mi><mml:mi>m</mml:mi></mml:mfrac></mml:mrow></mml:mstyle></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mspace width="thickmathspace" /><mml:msub><mml:mi>c</mml:mi><mml:mi>n</mml:mi></mml:msub><mml:mspace width="thickmathspace" /><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>n</mml:mi></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mrow><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext></mml:mrow><mml:mn>1</mml:mn><mml:mrow><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext></mml:mrow><mml:mo>&#x2264;</mml:mo><mml:mi>m</mml:mi><mml:mrow><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext></mml:mrow><mml:mo>&#x2264;</mml:mo><mml:mi>p</mml:mi></mml:math>
</disp-formula></p>
<p><disp-formula id="eqn-6"><label>(6)</label>
<mml:math id="mml-eqn-6" display="block"><mml:msub><mml:mi>c</mml:mi><mml:mi>m</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>n</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>m</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mi>n</mml:mi><mml:mi>m</mml:mi></mml:mfrac></mml:mrow></mml:mstyle></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mspace width="thickmathspace" /><mml:msub><mml:mi>c</mml:mi><mml:mi>n</mml:mi></mml:msub><mml:mspace width="thickmathspace" /><mml:msub><mml:mi>a</mml:mi><mml:mrow><mml:mi>m</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>n</mml:mi></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mrow><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext></mml:mrow><mml:mi>p</mml:mi><mml:mspace width="thickmathspace" /><mml:mo>&#x2264;</mml:mo><mml:mi>m</mml:mi><mml:mrow><mml:mspace width="thickmathspace" /><mml:mtext>&#xA0;</mml:mtext></mml:mrow><mml:mo>&#x2264;</mml:mo><mml:mi>d</mml:mi></mml:math>
</disp-formula></p>
<p>where <italic>&#x03C3;</italic><sup>2</sup> is the gain in linear prediction analysis, <italic>d</italic> is the number of LPCCs, and <italic>a</italic><sub><italic>m</italic></sub> is the linear prediction coefficient [<xref ref-type="bibr" rid="ref-26">26</xref>].</p>
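<p>A sketch of the recursion in Eqs. (4)&#x2013;(6), taking the predictor coefficients <italic>a<sub>m</sub></italic> and the gain <italic>&#x03C3;</italic><sup>2</sup> as given:</p>
<preformat>import math

def lpcc(a, gain, d):
    """Cepstral coefficients c_1..c_d from LP coefficients a_1..a_p, Eqs. (4)-(6).

    a: [a_1, ..., a_p]; gain: sigma^2 from the LP analysis."""
    p = len(a)
    c = [0.0] * (d + 1)
    c[0] = math.log(gain)                                # Eq. (4)
    for m in range(1, d + 1):
        acc = sum((n / m) * c[n] * a[m - n - 1]
                  for n in range(1, m) if m - n &lt;= p)    # shared summation
        c[m] = (a[m - 1] + acc) if m &lt;= p else acc       # Eq. (5) vs. Eq. (6)
    return c[1:]</preformat>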
</sec>
<sec id="s3_3_4">
<label>3.3.4</label>
<title>Synchro-Squeezing Transform</title>
<p>The SST is a time-frequency analysis method that decomposes complex motion signals into time-varying oscillatory components [<xref ref-type="bibr" rid="ref-27">27</xref>]. First, the continuous wavelet transform is calculated as;<disp-formula id="eqn-7"><label>(7)</label>
<mml:math id="mml-eqn-7" display="block"><mml:mi>W</mml:mi><mml:msub><mml:mi>T</mml:mi><mml:mi>x</mml:mi></mml:msub><mml:mspace width="thickmathspace" /><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>a</mml:mi><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mi>b</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:msqrt><mml:mi>a</mml:mi></mml:msqrt></mml:mrow></mml:mfrac></mml:mrow><mml:mspace width="thickmathspace" /><mml:munderover><mml:mrow><mml:mo>&#x222B;</mml:mo></mml:mrow><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mi mathvariant="normal">&#x221E;</mml:mi></mml:mrow><mml:mi mathvariant="normal">&#x221E;</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mover><mml:mrow><mml:mi>&#x03C6;</mml:mi><mml:mspace width="thickmathspace" /><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>b</mml:mi></mml:mrow><mml:mi>a</mml:mi></mml:mfrac></mml:mrow></mml:mstyle></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mspace width="thickmathspace" /><mml:mi>d</mml:mi><mml:mi>t</mml:mi></mml:mrow><mml:mo accent="false">&#x00AF;</mml:mo></mml:mover></mml:mstyle></mml:math>
</disp-formula>where, <italic>WT<sub>x</sub></italic> denotes the continuous wavelet transform of a signal window <italic>x(t)</italic>, <italic>a</italic> is the scale factor, and <italic>b</italic> is the time translation factor. &#x03C6;(<italic>t</italic>) is the mother wavelet, from which the instantaneous frequency is also calculated. Finally, the SST is represented as;<disp-formula id="eqn-8"><label>(8)</label>
<mml:math id="mml-eqn-8" display="block"><mml:mi>S</mml:mi><mml:mi>S</mml:mi><mml:msub><mml:mi>T</mml:mi><mml:mi>x</mml:mi></mml:msub><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mi>l</mml:mi></mml:msub><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mspace width="thickmathspace" /><mml:mi>b</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mi mathvariant="normal">&#x0394;</mml:mi><mml:mi>&#x03C9;</mml:mi></mml:mrow></mml:mfrac></mml:mrow><mml:mspace width="thickmathspace" /><mml:munder><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:msub><mml:mi>a</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>&#x003A;</mml:mo><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mrow><mml:mi>&#x03C9;</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mi>a</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>,</mml:mo><mml:mi>b</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>&#x03C9;</mml:mi><mml:mi>l</mml:mi></mml:msub></mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mo>&#x2264;</mml:mo><mml:mrow><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x0394;</mml:mi><mml:mi>&#x03C9;</mml:mi></mml:mrow><mml:mn>2</mml:mn></mml:mfrac></mml:mrow></mml:mrow></mml:munder><mml:mo>&#x2061;</mml:mo><mml:mi>W</mml:mi><mml:msub><mml:mi>T</mml:mi><mml:mi>x</mml:mi></mml:msub><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mi>a</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>,</mml:mo><mml:mspace width="thickmathspace" /><mml:mi>b</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:msubsup><mml:mi>a</mml:mi><mml:mi>i</mml:mi><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mfrac><mml:mn>3</mml:mn><mml:mn>2</mml:mn></mml:mfrac></mml:mrow></mml:mrow></mml:msubsup><mml:mi mathvariant="normal">&#x0394;</mml:mi><mml:msub><mml:mi>a</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mstyle></mml:math>
</disp-formula>where, <italic>a<sub>i</sub></italic> is the <italic>i</italic><sup>th</sup> scale value and &#x0394;<italic>a</italic><sub><italic>i</italic></sub>&#x2009;&#x003D;&#x2009;<italic>a</italic><sub><italic>i</italic></sub>&#x2009;&#x2212;&#x2009;<italic>a</italic><sub><italic>i</italic>&#x2212;1</sub>, while <italic>&#x03C9;</italic><sub><italic>l</italic></sub> represents the <italic>l</italic><sup>th</sup> frequency point [<xref ref-type="bibr" rid="ref-28">28</xref>]. <xref ref-type="fig" rid="fig-6">Fig. 6</xref> shows the SST for a window of filtered and patterned inertial data; a code sketch follows the figure.</p>
<fig id="fig-6">
<label>Figure 6</label>
<caption>
<title>SST for a window of inertial data</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_26051-fig-6.png"/>
</fig>
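<p>As a hedged example, the third-party ssqueezepy library implements the CWT of Eq. (7) and the synchrosqueezing of Eq. (8); using it here is our choice, not the paper&#x2019;s:</p>
<preformat># Sketch using ssqueezepy (pip install ssqueezepy); library choice is ours.
import numpy as np
from ssqueezepy import ssq_cwt

window = np.random.randn(250)               # dummy 5-s window at 50 Hz
Tx, Wx, ssq_freqs, scales = ssq_cwt(window) # Tx: SST_x (Eq. 8); Wx: WT_x (Eq. 7)</preformat>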
</sec>
</sec>
<sec id="s3_4">
<label>3.4</label>
<title>Cues Optimization</title>
<p>Codebook generation is important for further symbolizing the cues [<xref ref-type="bibr" rid="ref-29">29</xref>] in order to maximize the between-activity variance and minimize the within-activity variance. This research proposes the FLDA technique for cues symbolization. FLDA is employed to extract the linear discriminating cues that categorize the physical actions performed in an e-learning environment [<xref ref-type="bibr" rid="ref-16">16</xref>]. <xref ref-type="fig" rid="fig-7">Fig. 7</xref> shows that, by applying FLDA over the ERICA dataset, the system successfully finds discriminating cues for all three OPE actions.</p>
<fig id="fig-7">
<label>Figure 7</label>
<caption>
<title>Optimized and segregated cues using FLDA technique</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_26051-fig-7.png"/>
</fig>
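<p>scikit-learn&#x2019;s <monospace>LinearDiscriminantAnalysis</monospace> implements Fisher&#x2019;s criterion directly; a minimal sketch in which the cue matrix and labels are placeholders:</p>
<preformat>import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

X = np.random.randn(300, 40)        # placeholder cue matrix (windows x cues)
y = np.random.randint(0, 3, 300)    # placeholder labels for the 3 OPE actions

flda = LinearDiscriminantAnalysis(n_components=2)   # at most classes-1 components
X_opt = flda.fit_transform(X, y)    # optimized, segregated cues (cf. Fig. 7)</preformat>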
</sec>
<sec id="s3_5">
<label>3.5</label>
<title>Classification for OPE</title>
<p>This model proposes a unique method for classifying OPE activities. The EKF has been used as an effective mechanism for training neural networks in a supervised learning environment. The EKF is more efficient than other fine-tuning methods such as backpropagation because it considers second-order information about the error surface, which is important for training the covariance matrix [<xref ref-type="bibr" rid="ref-17">17</xref>]. Although the EKF is orders of magnitude more costly than backpropagation, it keeps updating the covariance matrix with this second-order information, which reduces the chance of the training process stalling at a local minimum. <xref ref-type="disp-formula" rid="eqn-9">Eq. (9)</xref> reduces the EKF implementation to the formula:<disp-formula id="eqn-9"><label>(9)</label>
<mml:math id="mml-eqn-9" display="block"><mml:mi mathvariant="normal">&#x0394;</mml:mi><mml:msub><mml:mi>w</mml:mi><mml:mi>j</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mi>a</mml:mi><mml:mi>p</mml:mi><mml:munderover><mml:mrow><mml:mo movablelimits="false">&#x2211;</mml:mo></mml:mrow><mml:mrow><mml:mi>m</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>N</mml:mi></mml:munderover><mml:mo>&#x2061;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mi>d</mml:mi><mml:mi>m</mml:mi></mml:msub><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>z</mml:mi><mml:mi>m</mml:mi></mml:msub></mml:mrow><mml:mo stretchy="false">)</mml:mo><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:msub><mml:mi>z</mml:mi><mml:mi>m</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:msub><mml:mi>w</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:mfrac></mml:mrow></mml:mstyle></mml:math>
</disp-formula>where, <italic>w</italic> is the vector of weights and thresholds, <italic>z</italic> is the output of the system at window <italic>m</italic>, (<italic>d</italic><sub><italic>m</italic></sub>&#x2009;&#x2212;&#x2009;<italic>z</italic><sub><italic>m</italic></sub>) is the residual, i.e., the difference between the expected and actual observation, and <inline-formula id="ieqn-3">
<mml:math id="mml-ieqn-3"><mml:mrow><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:msub><mml:mi>z</mml:mi><mml:mi>m</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x2202;</mml:mi><mml:msub><mml:mi>w</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:mfrac></mml:mrow></mml:math>
</inline-formula> is the gradient matrix [<xref ref-type="bibr" rid="ref-30">30</xref>]. Furthermore, neural networks have been trained as the classifier and then fine-tuned using the EKF. <xref ref-type="fig" rid="fig-8">Fig. 8</xref> elucidates the classification procedure via the EKF [<xref ref-type="bibr" rid="ref-31">31</xref>] and the neural network; a simplified sketch of the update follows the figure.</p>
<fig id="fig-8">
<label>Figure 8</label>
<caption>
<title>Classification method using EKF [<xref ref-type="bibr" rid="ref-31">31</xref>] and neural networks</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_26051-fig-8.png"/>
</fig>
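<p>For one output unit, the update of Eq. (9) reduces to a residual-weighted gradient step. The sketch below folds the <italic>a&#x00B7;p</italic> factor into a single learning rate and omits the covariance propagation of the full EKF; both are our simplifications:</p>
<preformat>import numpy as np

def ekf_style_update(w, x, d, rate=0.01):
    """One simplified weight update following Eq. (9) for a linear output z = w.x.

    The a*p factor is folded into `rate`; the full EKF also propagates a
    covariance matrix, omitted in this sketch."""
    z = w @ x                        # network output z_m
    residual = d - z                 # (d_m - z_m)
    return w + rate * residual * x   # dz/dw = x for a linear unit</preformat>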
</sec>
</sec>
<sec id="s4">
<label>4</label>
<title>Experiments and Results</title>
<p>This section is structured into three sub-sections. First, the benchmark dataset is described in detail. Second, the experimental results are presented and analyzed. Third, the proposed system&#x2019;s performance is compared with state-of-the-art OPE healthcare tools.</p>
<sec id="s4_1">
<label>4.1</label>
<title>Experimental Results</title>
<p>In this paper, physical exercise data covering a variety of physical actions has been utilized to make the system more precise. For reliable results, a 5-fold cross-validation method has been applied to the exercise data; a sketch follows. The number of training samples has been set according to the cross-validation in order to minimize reconstruction errors [<xref ref-type="bibr" rid="ref-32">32</xref>].</p>
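<p>A minimal sketch of the 5-fold protocol with scikit-learn, where the cue matrix, labels, and classifier settings are placeholders:</p>
<preformat>import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.neural_network import MLPClassifier

X = np.random.randn(300, 40)        # placeholder cue matrix
y = np.random.randint(0, 3, 300)    # placeholder activity labels

scores = cross_val_score(MLPClassifier(max_iter=500), X, y, cv=5)
print(scores.mean())                # mean accuracy over the 5 folds</preformat>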
<sec id="s4_1_1">
<label>4.1.1</label>
<title>Experimental Data Setup: ERICA</title>
<p>For the experimentation of the proposed method, this research selected the ERICA dataset, collected from real-time online users who used ERICA as a digital personal trainer. It comprises 33 participants between 20 and 40 years of age. Self-rated expertise varied from novice to expert, with most participants rating themselves as intermediate. The dataset contains four different sessions, each lasting 22 to 48 min. The participants performed multiple exercises, but the available dataset covers biceps curls, lateral raises, and triceps extension [<xref ref-type="bibr" rid="ref-9">9</xref>]. <xref ref-type="fig" rid="fig-9">Fig. 9</xref> shows the exercises performed in the dataset.</p>
<fig id="fig-9">
<label>Figure 9</label>
<caption>
<title>Bottom and top positions for a) Biceps curls, b) Lateral raises, and c) Triceps extension</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_26051-fig-9.png"/>
</fig>
</sec>
<sec id="s4_1_2">
<label>4.1.2</label>
<title>Classification Results and Analysis</title>
<p>The goal of this research work is to enhance the e-learning domain by proposing a state-of-the-art OPE system for young students that recognizes correctly performed OPE actions and points out mistakes. Hence, neural networks fine-tuned with the EKF technique have been proposed; this is computationally expensive, but the results improve on other well-structured systems. The proposed system achieved an 86&#x0025; accuracy rate, with false-negative rates of 7&#x0025; for lateral raises, 17&#x0025; for biceps curls, and 26&#x0025; for triceps extension. <xref ref-type="table" rid="table-2">Tab. 2</xref> gives the confusion matrix obtained from the neural network and EKF implementation.</p>
<table-wrap id="table-2"><label>Table 2</label>
<caption>
<title>Confusion matrix over the ERICA dataset</title></caption>
<table><colgroup><col align="left"/><col align="left"/><col align="left"/><col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left"/>
<th align="left">Biceps curls</th>
<th align="left">Lateral raises</th>
<th align="left">Triceps extension</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">Biceps curls</td>
<td align="left">83</td>
<td align="left">16</td>
<td align="left">1</td>
</tr>
<tr>
<td align="left">Lateral raises</td>
<td align="left">5</td>
<td align="left">93</td>
<td align="left">2</td>
</tr>
<tr>
<td align="left">Triceps extension</td>
<td align="left">5</td>
<td align="left">20</td>
<td align="left">74</td>
</tr>
<tr>
<td align="left" colspan="4">Mean accuracy&#x2009;&#x003D;&#x2009;86.0&#x0025;</td>
</tr>
</tbody>
</table>
</table-wrap>
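<p>The per-class false-negative rates quoted above follow directly from the diagonal of <xref ref-type="table" rid="table-2">Tab. 2</xref>, reading each row as percentages:</p>
<preformat>import numpy as np

cm = np.array([[83, 16,  1],    # rows: true class, in percent (Tab. 2)
               [ 5, 93,  2],
               [ 5, 20, 74]])
fn_rate = 100 - np.diag(cm)     # biceps 17%, lateral raises 7%, triceps 26%</preformat>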
</sec>
<sec id="s4_1_3">
<label>4.1.3</label>
<title>Proposed System Comparison with State-of-the-Art Methods</title>
<p>Other state-of-the-art models have been selected for peer comparison to demonstrate the effectiveness and proficiency of the proposed OPE system. According to <xref ref-type="table" rid="table-3">Tab. 3</xref>, Radhakrishnan et al. in [<xref ref-type="bibr" rid="ref-9">9</xref>] achieved 70&#x0025; accuracy using frequency-domain cues with temporal features. The authors of [<xref ref-type="bibr" rid="ref-33">33</xref>] tried 2-s and 5-s windows, where the 5-s windows gave a recognition accuracy of only 61&#x0025;. The smartphone-based exercise training system COPDTrainer reported a lowest exercise recognition rate of 77.5&#x0025; in [<xref ref-type="bibr" rid="ref-34">34</xref>], still below the rates of the proposed system. Leg muscles were monitored using a pressure-sensing mechanism in [<xref ref-type="bibr" rid="ref-35">35</xref>], which achieved an accuracy of 81.7&#x0025; for gym exercise recognition. In a filter-based sensor-fusion activity recognition system [<xref ref-type="bibr" rid="ref-36">36</xref>], the Kalman filter achieved 84&#x0025; accuracy. In [<xref ref-type="bibr" rid="ref-37">37</xref>], Crema et al. proposed a well-structured strategy for exercise classification, achieving an 85&#x0025; accuracy rate.</p>
<table-wrap id="table-3"><label>Table 3</label>
<caption>
<title>State-of-the-art comparison (in terms of accuracy) based on the well-designed systems</title></caption>
<table><colgroup><col align="left"/><col align="left"/>
</colgroup>
<thead>
<tr>
<th align="left">Systems</th>
<th align="left">Accuracy rate (&#x0025;)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">Activity recognition using 5-s window [<xref ref-type="bibr" rid="ref-33">33</xref>]</td>
<td align="left">61.0</td>
</tr>
<tr>
<td align="left">ERICA [<xref ref-type="bibr" rid="ref-9">9</xref>]</td>
<td align="left">70.0</td>
</tr>
<tr>
<td align="left">COPDTrainer: lowest recognition rate [<xref ref-type="bibr" rid="ref-34">34</xref>]</td>
<td align="left">77.5</td>
</tr>
<tr>
<td align="left">Gym exercises recognition [<xref ref-type="bibr" rid="ref-35">35</xref>]</td>
<td align="left">81.7</td>
</tr>
<tr>
<td align="left">Kalman filter based activity recognition [<xref ref-type="bibr" rid="ref-36">36</xref>]</td>
<td align="left">84.0</td>
</tr>
<tr>
<td align="left">Automatic exercise detection [<xref ref-type="bibr" rid="ref-37">37</xref>]</td>
<td align="left">85.0</td>
</tr>
<tr>
<td align="left">Proposed OPE system</td>
<td align="left">86.0</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
</sec>
</sec>
<sec id="s5">
<label>5</label>
<title>Discussion</title>
<p>The research works described in <xref ref-type="table" rid="table-3">Tab. 3</xref> achieved good accuracies, but there is room for improvement in real-time OPE. The proposed technique achieves state-of-the-art performance when compared with existing sophisticated SHER systems. The system is well designed for healthcare and can support multiple applications, such as live gym classes and kids&#x2019; OPE. However, the proposed system lacks performance evaluation over larger datasets. Moreover, other machine learning and deep learning techniques could further improve the proposed OPE activity recognition system. One serious issue is the lack of interaction between students and teachers in OPE classes, which hampers conveying the value of physical education to students. A solution is to provide live feedback to students through a sophisticated system. <xref ref-type="fig" rid="fig-10">Fig. 10</xref> shows a system providing live feedback.</p>
<fig id="fig-10">
<label>Figure 10</label>
<caption>
<title>Live feedback provided by the system for OPE</title></caption>
<graphic mimetype="image" mime-subtype="png" xlink:href="IASC_26051-fig-10.png"/>
</fig>
</sec>
<sec id="s6">
<label>6</label>
<title>Conclusion</title>
<p>An innovative system for OPE activities has been proposed in this research article. The proposed approach treats OPE recognition as the fundamental problem of this work and addresses it using multiple algorithms. Our proposed system efficiently detects physical education exercises for e-learning, which is important for the OPE of students such as kindergarten kids. The methodology consists of a pre-processing stage via QIFA and a sliding windowing approach. The system then identifies static and kinematic patterns, followed by cues extraction using HHT, TECCs, LPCCs, and SST. The cues are optimized using FLDA, and the OPE activities are classified via an EKF and neural network. The system can be applied to OPE classes for young learners, intelligent mentors, and interactive learning systems. Quick reaction and interaction are missing from online sessions, yet they are important for making students more attentive to and concerned about their health. Therefore, we will extend the proposed system in the future to provide live feedback to the learners. With live feedback on their OPE, students will be more motivated and better able to grasp the value of physical education in an e-learning environment.</p>
</sec>
</body>
<back>
<ack>
<p>This research was supported by a Grant (2021R1F1A1063634) of the Basic Science Research Program through the National Research Foundation (NRF) funded by the Ministry of Education, Republic of Korea.</p>
</ack><fn-group>
<fn fn-type="other">
<p><bold>Funding Statement:</bold> The authors received no specific funding for this study.</p>
</fn>
<fn fn-type="conflict">
<p><bold>Conflicts of Interest:</bold> The authors declare that they have no conflicts of interest to report regarding the present study.</p>
</fn>
</fn-group>
<ref-list content-type="authoryear">
<title>References</title>
<ref id="ref-1"><label>[1]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S. S.</given-names> <surname>Khanal</surname></string-name>, <string-name><given-names>P.</given-names> <surname>Prasad</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Alsadoon</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Maag</surname></string-name></person-group>, &#x201C;<article-title>A systematic review: Machine learning based recommendation systems for e-learning</article-title>,&#x201D; <source>Education and Information Technologies</source>, vol. <volume>25</volume>, pp. <fpage>2635</fpage>&#x2013;<lpage>2664</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-2"><label>[2]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Nadeem</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>Human actions tracking and recognition based on body parts detection via artificial neural network</article-title>,&#x201D; in <conf-name>Proc. of the 2020 3rd Int. Conf. on Advancements in Computational Sciences (ICACS)</conf-name>, <conf-loc>Lahore, Pakistan</conf-loc>, pp. <fpage>1</fpage>&#x2013;<lpage>6</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-3"><label>[3]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>S.</given-names> <surname>Ishii</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Yokokubo</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Luimula</surname></string-name> and <string-name><given-names>G.</given-names> <surname>Lopez</surname></string-name></person-group>, &#x201C;<article-title>ExerSense: Physical exercise recognition and counting algorithm from wearables robust to positioning</article-title>,&#x201D; <source>Sensors</source>, vol. <volume>21</volume>, no. <issue>1</issue>, pp. <fpage>91</fpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-4"><label>[4]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name> and <string-name><given-names>M.</given-names> <surname>Mahmood</surname></string-name></person-group>, &#x201C;<article-title>Students&#x2019; behavior mining in e-learning environment using cognitive processes with information technologies</article-title>,&#x201D; <source>Education and Information Technologies</source>, vol. <volume>24</volume>, pp. <fpage>2797</fpage>&#x2013;<lpage>2821</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-5"><label>[5]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>F.</given-names> <surname>Demrozi</surname></string-name>, <string-name><given-names>G.</given-names> <surname>Pravadelli</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Bihorac</surname></string-name> and <string-name><given-names>P.</given-names> <surname>Rashidi</surname></string-name></person-group>, &#x201C;<article-title>Human activity recognition using inertial, physiological and environmental sensors: A comprehensive survey</article-title>,&#x201D; in <source>IEEE Access</source>, vol. <volume>8</volume>, pp. <fpage>210816</fpage>&#x2013;<lpage>210836</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-6"><label>[6]</label><mixed-citation publication-type="book"><person-group person-group-type="author"><string-name><given-names>X.</given-names> <surname>Duan</surname></string-name> and <string-name><given-names>P.</given-names> <surname>Duan</surname></string-name></person-group>, &#x201C;<chapter-title>Research on pattern recognition method of online english education based on feature self learning</chapter-title>,&#x201D; in <source>e-Learning, e-Education, and Online Training (eLEOT)</source>. <publisher-name>Lecture Notes of the Institute for Computer Sciences, Social Informatics and Telecommunications Engineering</publisher-name>, vol. <volume>389</volume>, <publisher-loc>Cham</publisher-loc>: <publisher-name>Springer</publisher-name>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-7"><label>[7]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. Y.</given-names> <surname>Osipov</surname></string-name>, <string-name><given-names>M. D.</given-names> <surname>Kudryavtsev</surname></string-name>, <string-name><given-names>A. G.</given-names> <surname>Galimova</surname></string-name>, <string-name><given-names>I. I.</given-names> <surname>Plotnikova</surname></string-name> and <string-name><given-names>N. V.</given-names> <surname>Skurikhina</surname></string-name></person-group>, &#x201C;<article-title>How can distance learning be used in the physical education of students?</article-title>,&#x201D; <source>Revista Romaneasca Pentru Educatie Multidimensionala</source>, vol. <volume>12</volume>, no. <issue>2Sup1</issue>, pp. <fpage>77</fpage>&#x2013;<lpage>85</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-8"><label>[8]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name>, <string-name><given-names>N.</given-names> <surname>Sharif</surname></string-name>, <string-name><given-names>J. T.</given-names> <surname>Kim</surname></string-name> and <string-name><given-names>T. -S.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>Human activity recognition via recognized body parts of human depth silhouettes for residents monitoring services at smart homes</article-title>,&#x201D; <source>Indoor and Built Environment</source>, vol. <volume>22</volume>, pp. <fpage>271</fpage>&#x2013;<lpage>279</lpage>, <year>2013</year>.</mixed-citation></ref>
<ref id="ref-9"><label>[9]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Radhakrishnan</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Rathnayake</surname></string-name>, <string-name><given-names>O. K.</given-names> <surname>Han</surname></string-name>, <string-name><given-names>I.</given-names> <surname>Hwang</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Misra</surname></string-name></person-group>, &#x201C;<article-title>ERICA: Enabling real-time mistake detection &#x0026; corrective feedback for free-weights exercises</article-title>,&#x201D; in <conf-name>Proc. of the 18th Conf. on Embedded Networked Sensor Systems (SenSys &#x2018;20)</conf-name>, <conf-loc>New York, NY, USA</conf-loc>, <publisher-name>Association for Computing Machinery</publisher-name>, pp. <fpage>558</fpage>&#x2013;<lpage>571</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-10"><label>[10]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name>, <string-name><given-names>M. A.</given-names> <surname>Quaid</surname></string-name>, <string-name><given-names>S. B.</given-names> <surname>Tahir</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>A study of accelerometer and gyroscope measurements in physical life-log activities detection systems</article-title>,&#x201D; <source>Sensors</source>, vol. <volume>20</volume>, pp. <fpage>1</fpage>&#x2013;<lpage>22</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-11"><label>[11]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>J.</given-names> <surname>Madiha</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Gochoo</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>HF-SPHR: Hybrid features for sustainable physical healthcare pattern recognition using deep belief networks</article-title>,&#x201D; <source>Sustainability</source>, vol. <volume>13</volume>, pp. <fpage>1</fpage>&#x2013;<lpage>28</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-12"><label>[12]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. M.</given-names> <surname>Khan</surname></string-name>, <string-name><given-names>Y. K.</given-names> <surname>Lee</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Lee</surname></string-name> and <string-name><given-names>T. S.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>Accelerometer&#x2019;s position independent physical activity recognition system for long-term activity monitoring in the elderly</article-title>,&#x201D; <source>Medical &#x0026; Biological Engineering &#x0026; Computing</source>, vol. <volume>48</volume>, pp. <fpage>1271</fpage>&#x2013;<lpage>1279</lpage>, <year>2010</year>.</mixed-citation></ref>
<ref id="ref-13"><label>[13]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>L.</given-names> <surname>Morillo</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Gonzalez-Abril</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Ramirez</surname></string-name> and <string-name><given-names>M.</given-names> <surname>de la Concepcion</surname></string-name></person-group>, &#x201C;<article-title>Low energy physical activity recognition system on smartphones</article-title>,&#x201D; <source>Sensors</source>, vol. <volume>15</volume>, no. <issue>3</issue>, pp. <fpage>5163</fpage>&#x2013;<lpage>5196</lpage>, <year>2015</year>.</mixed-citation></ref>
<ref id="ref-14"><label>[14]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Batool</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>Stochastic recognition of physical activity and healthcare using tri-axial inertial wearable sensors</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>10</volume>, pp. <fpage>1</fpage>&#x2013;<lpage>20</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-15"><label>[15]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M. A. K.</given-names> <surname>Quaid</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name></person-group>, &#x201C;<article-title>Wearable sensors based human behavioral pattern recognition using statistical features and reweighted genetic algorithm</article-title>,&#x201D; <source>Multimedia Tools and Applications</source>, vol. <volume>79</volume>, pp. <fpage>6061</fpage>&#x2013;<lpage>6083</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-16"><label>[16]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>N.</given-names> <surname>Khalid</surname></string-name>, <string-name><given-names>Y. Y.</given-names> <surname>Ghadi</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Gochoo</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>Semantic recognition of human-object interactions via Gaussian-based elliptical modeling and pixel-level labeling</article-title>,&#x201D; in <source>IEEE Access</source>, vol. <volume>9</volume>, pp. <fpage>111249</fpage>&#x2013;<lpage>111266</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-17"><label>[17]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A. N.</given-names> <surname>Chernodub</surname></string-name></person-group>, &#x201C;<article-title>Training neural networks for classification using the extended kalman filter: A comparative study</article-title>,&#x201D; <source>Optical Memory and Neural Networks</source>, vol. <volume>23</volume>, pp. <fpage>96</fpage>&#x2013;<lpage>103</lpage>, <year>2014</year>.</mixed-citation></ref>
<ref id="ref-18"><label>[18]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>B.</given-names> <surname>Tahir</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>IMU sensor based automatic-features descriptor for healthcare patient&#x2019;s daily life-log recognition</article-title>,&#x201D; in <conf-name>Proc. of IEEE Conf. on Applied Sciences and Technology</conf-name>, <conf-loc>Islamabad, Pakistan</conf-loc>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-19"><label>[19]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Javeed</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>Wearable sensors based exertion recognition using statistical features and random forest for physical healthcare monitoring</article-title>,&#x201D; in <conf-name>Proc. of the Int. Bhurban Conf. on Applied Sciences and Technologies (IBCAST)</conf-name>, <conf-loc>Islamabad, Pakistan</conf-loc>, pp. <fpage>512</fpage>&#x2013;<lpage>517</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-20"><label>[20]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>Y.</given-names> <surname>Liu</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Mu</surname></string-name>, <string-name><given-names>K.</given-names> <surname>Chen</surname></string-name>, <string-name><given-names>Y.</given-names> <surname>Li</surname></string-name> and <string-name><given-names>J.</given-names> <surname>Guo</surname></string-name></person-group>, &#x201C;<article-title>Daily activity feature selection in smart homes based on Pearson correlation coefficient</article-title>,&#x201D; <source>Neural Processing Letters</source>, vol. <volume>51</volume>, pp. <fpage>1771</fpage>&#x2013;<lpage>1787</lpage>, <year>2020</year>.</mixed-citation></ref>
<ref id="ref-21"><label>[21]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>H.</given-names> <surname>Xu</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Liu</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Hu</surname></string-name> and <string-name><given-names>Y.</given-names> <surname>Zhang</surname></string-name></person-group>, &#x201C;<article-title>Wearable sensor-based human activity recognition method with multi-features extracted from hilbert-huang transform</article-title>,&#x201D; <source>Sensors</source>, vol. <volume>16</volume>, no. <issue>12</issue>, pp. <fpage>2048</fpage>, <year>2016</year>.</mixed-citation></ref>
<ref id="ref-22"><label>[22]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Gochoo</surname></string-name>, <string-name><given-names>S. B. U. D.</given-names> <surname>Tahir</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>Monitoring real-time personal locomotion behaviors over smart indoor-outdoor environments via body-worn sensors</article-title>,&#x201D; in <source>IEEE Access</source>, vol. <volume>9</volume>, pp. <fpage>70556</fpage>&#x2013;<lpage>70570</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-23"><label>[23]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>V. S.</given-names> <surname>Mahalle</surname></string-name>, <string-name><given-names>G. N.</given-names> <surname>Bonde</surname></string-name>, <string-name><given-names>S. S.</given-names> <surname>Jadhao</surname></string-name> and <string-name><given-names>S. R.</given-names> <surname>Paraskar</surname></string-name></person-group>, &#x201C;<article-title>Teager energy operator: A signal processing approach for detection and classification of power quality events</article-title>,&#x201D; in <conf-name>Proc. of the 2018 2nd Int. Conf. on Trends in Electronics and Informatics (ICOEI)</conf-name>, <conf-loc>Tirunelveli, India</conf-loc>, pp. <fpage>1109</fpage>&#x2013;<lpage>1114</lpage>, <year>2018</year>.</mixed-citation></ref>
<ref id="ref-24"><label>[24]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Batool</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name> and <string-name><given-names>K.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>Sensors technologies for human activity analysis based on SVM optimized by PSO algorithm</article-title>,&#x201D; in <conf-name>Proc. of the 2019 Int. Conf. on Applied and Engineering Mathematics (ICAEM)</conf-name>, <conf-loc>Taxila, Pakistan</conf-loc>, pp. <fpage>145</fpage>&#x2013;<lpage>150</lpage>, <year>2019</year>.</mixed-citation></ref>
<ref id="ref-25"><label>[25]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>K.</given-names> <surname>Khoria</surname></string-name>, <string-name><given-names>M. R.</given-names> <surname>Kamble</surname></string-name> and <string-name><given-names>H. A.</given-names> <surname>Patil</surname></string-name></person-group>, &#x201C;<article-title>Teager energy cepstral coefficients for classification of normal <italic>vs.</italic> whisper speech</article-title>,&#x201D; in <conf-name>Proc. of 2020 28th European Signal Processing Conf. (EUSIPCO)</conf-name>, <conf-loc>Amsterdam, Netherlands</conf-loc>, pp. <fpage>1</fpage>&#x2013;<lpage>5</lpage>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-26"><label>[26]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Javeed</surname></string-name> and <string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name></person-group>, &#x201C;<article-title>Body-worn hybrid-sensors based motion patterns detection via Bag-of-features and fuzzy logic optimization</article-title>,&#x201D; in <conf-name>Proc. of the 2021 Int. Conf. on Innovative Computing (ICIC)</conf-name>, <conf-loc>Lahore, Pakistan</conf-loc>, <year>2021</year>.</mixed-citation></ref>
<ref id="ref-27"><label>[27]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>T.</given-names> <surname>Oberlin</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Meignen</surname></string-name> and <string-name><given-names>V.</given-names> <surname>Perrier</surname></string-name></person-group>, &#x201C;<article-title>The Fourier-based synchrosqueezing transform</article-title>,&#x201D; in <conf-name>Proc. of the 2014 IEEE Conf. on Acoustics, Speech and Signal Processing (ICASSP)</conf-name>, <conf-loc>Florence, Italy</conf-loc>, pp. <fpage>315</fpage>&#x2013;<lpage>319</lpage>, <year>2014</year>.</mixed-citation></ref>
<ref id="ref-28"><label>[28]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>H.</given-names> <surname>Chen</surname></string-name>, <string-name><given-names>L.</given-names> <surname>Lu</surname></string-name>, <string-name><given-names>D.</given-names> <surname>Xu</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Kang</surname></string-name> and <string-name><given-names>X.</given-names> <surname>Chen</surname></string-name></person-group>, &#x201C;<article-title>The synchrosqueezing algorithm based on generalized S-transform for high-precision time-frequency analysis</article-title>,&#x201D; <source>Applied Sciences</source>, vol. <volume>7</volume>, no. <issue>8</issue>, pp. <fpage>769</fpage>, <year>2017</year>.</mixed-citation></ref>
<ref id="ref-29"><label>[29]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Kamal</surname></string-name> and <string-name><given-names>D.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>A depth video sensor-based life-logging human activity recognition system for elderly care in smart indoor environments</article-title>,&#x201D; <source>Sensors</source>, vol. <volume>14</volume>, no. <issue>7</issue>, pp. <fpage>11735</fpage>&#x2013;<lpage>11759</lpage>, <year>2014</year>.</mixed-citation></ref>
<ref id="ref-30"><label>[30]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M. C.</given-names> <surname>Mahdi</surname></string-name> and <string-name><given-names>A. -R.</given-names> <surname>Shehab</surname></string-name></person-group>, &#x201C;<article-title>Attitude determination and control system design of KufaSat</article-title>,&#x201D; <source>International Journal of Current Engineering and Technology</source>, vol. <volume>4</volume>, no. <issue>4</issue>, pp. <fpage>2910</fpage>&#x2013;<lpage>2920</lpage>, <year>2014</year>.</mixed-citation></ref>
<ref id="ref-31"><label>[31]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>D. W.</given-names> <surname>Ruck</surname></string-name>, <string-name><given-names>S. K.</given-names> <surname>Rogers</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Kabrisky</surname></string-name>, <string-name><given-names>P. S.</given-names> <surname>Maybeck</surname></string-name> and <string-name><given-names>M. E.</given-names> <surname>Oxley</surname></string-name></person-group>, &#x201C;<article-title>Comparative analysis of backpropagation and the extended kalman filter for training multilayer perceptrons</article-title>,&#x201D; in <source>IEEE Transactions on Pattern Analysis and Machine Intelligence</source>, vol. <volume>14</volume>, no. <issue>6</issue>, pp. <fpage>686</fpage>&#x2013;<lpage>691</lpage>, <year>1992</year>.</mixed-citation></ref>
<ref id="ref-32"><label>[32]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>A.</given-names> <surname>Jalal</surname></string-name>, <string-name><given-names>Y. -H.</given-names> <surname>Kim</surname></string-name>, <string-name><given-names>Y. -J.</given-names> <surname>Kim</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Kamal</surname></string-name> and <string-name><given-names>D.</given-names> <surname>Kim</surname></string-name></person-group>, &#x201C;<article-title>Robust human activity recognition from depth video using spatiotemporal multi-fused features</article-title>,&#x201D; <source>Pattern Recognition</source>, vol. <volume>61</volume>, pp. <fpage>295</fpage>&#x2013;<lpage>308</lpage>, <year>2017</year>.</mixed-citation></ref>
<ref id="ref-33"><label>[33]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>M.</given-names> <surname>Shoaib</surname></string-name>, <string-name><given-names>S.</given-names> <surname>Bosch</surname></string-name>, <string-name><given-names>O. D.</given-names> <surname>Incel</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Scholten</surname></string-name>, and <string-name><given-names>P. J.</given-names> <surname>Havinga</surname></string-name></person-group>, &#x201C;<article-title>Complex human activity recognition using smartphone and wrist-worn motion sensors</article-title>,&#x201D; <source>Sensors (Basel, Switzerland)</source>, vol. <volume>16</volume>, no. <issue>4</issue>, pp. <fpage>426</fpage>, <year>2016</year>.</mixed-citation></ref>
<ref id="ref-34"><label>[34]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>G.</given-names> <surname>Spina</surname></string-name>, <string-name><given-names>G.</given-names> <surname>Huang</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Vaes</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Spruit</surname></string-name> and <string-name><given-names>O.</given-names> <surname>Amft</surname></string-name></person-group>, &#x201C;<article-title>COPDTrainer: A smartphone-based motion rehabilitation training system with real-time acoustic feedback</article-title>,&#x201D; in <conf-name>Proc. of the 2013 ACM Int. Joint Conf. on Pervasive and Ubiquitous Computing (UbiComp &#x2018;13)</conf-name>, <publisher-name>Association for Computing Machinery</publisher-name>, <conf-loc>New York, NY, USA</conf-loc>, pp. <fpage>597</fpage>&#x2013;<lpage>606</lpage>, <year>2013</year>.</mixed-citation></ref>
<ref id="ref-35"><label>[35]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>B.</given-names> <surname>Zhou</surname></string-name>, <string-name><given-names>M.</given-names> <surname>Sundholm</surname></string-name>, <string-name><given-names>J.</given-names> <surname>Cheng</surname></string-name>, <string-name><given-names>H.</given-names> <surname>Cruz</surname></string-name> and <string-name><given-names>P.</given-names> <surname>Lukowicz</surname></string-name></person-group>, &#x201C;<article-title>Never skip leg day: A novel wearable approach to monitoring gym leg exercises</article-title>,&#x201D; in <conf-name>Proc. of 2016 IEEE Int. Conf. on Pervasive Computing and Communications (PerCom)</conf-name>, pp. <fpage>1</fpage>&#x2013;<lpage>9</lpage>, <year>2016</year>.</mixed-citation></ref>
<ref id="ref-36"><label>[36]</label><mixed-citation publication-type="journal"><person-group person-group-type="author"><string-name><given-names>D.</given-names> <surname>Natarajasivan</surname></string-name> and <string-name><given-names>M.</given-names> <surname>Govindarajan</surname></string-name></person-group>, &#x201C;<article-title>S filter based sensor fusion for activity recognition using smartphone</article-title>,&#x201D; <source>International Journal of Computer Science and Telecommunications</source>, vol. <volume>7</volume>, no. <issue>5</issue>, pp. <fpage>26</fpage>&#x2013;<lpage>31</lpage>, <year>2016</year>.</mixed-citation></ref>
<ref id="ref-37"><label>[37]</label><mixed-citation publication-type="conf-proc"><person-group person-group-type="author"><string-name><given-names>C.</given-names> <surname>Crema</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Depari</surname></string-name>, <string-name><given-names>A.</given-names> <surname>Flammini</surname></string-name>, <string-name><given-names>E.</given-names> <surname>Sisinni</surname></string-name>, <string-name><given-names>T.</given-names> <surname>Haslwanter</surname></string-name> and <string-name><given-names>S.</given-names> <surname>Salzmann</surname></string-name></person-group>, &#x201C;<article-title>IMU-based solution for automatic detection and classification of exercises in the fitness scenario</article-title>,&#x201D; in <conf-name>2017 IEEE Sensors Applications Symp. (SAS)</conf-name>, <conf-loc>Glassboro, NJ, USA</conf-loc>, pp. <fpage>1</fpage>&#x2013;<lpage>6</lpage>, <year>2017</year>.</mixed-citation></ref>
</ref-list>
</back>
</article>