<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xml:lang="EN" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="brief-report">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Neurosci.</journal-id>
<journal-title>Frontiers in Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Neurosci.</abbrev-journal-title>
<issn pub-type="epub">1662-453X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnins.2021.774857</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Neuroscience</subject>
<subj-group>
<subject>Brief Research Report</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Electroencephalogram-Based Motor Imagery Classification Using Deep Residual Convolutional Networks</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Huang</surname> <given-names>Jing-Shan</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/989826/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Liu</surname> <given-names>Wan-Shan</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Yao</surname> <given-names>Bin</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1482425/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Wang</surname> <given-names>Zhan-Xiang</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1337286/overview"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Chen</surname> <given-names>Si-Fang</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<xref ref-type="corresp" rid="c002"><sup>&#x002A;</sup></xref>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Sun</surname> <given-names>Wei-Fang</given-names></name>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
<xref ref-type="corresp" rid="c003"><sup>&#x002A;</sup></xref>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>School of Aerospace Engineering, Xiamen University</institution>, <addr-line>Xiamen</addr-line>, <country>China</country></aff>
<aff id="aff2"><sup>2</sup><institution>Shenzhen Research Institute of Xiamen University</institution>, <addr-line>Shenzhen</addr-line>, <country>China</country></aff>
<aff id="aff3"><sup>3</sup><institution>Institute of Neurosurgery, School of Medicine, Xiamen University</institution>, <addr-line>Xiamen</addr-line>, <country>China</country></aff>
<aff id="aff4"><sup>4</sup><institution>Xiamen Key Laboratory of Brain Center, The First Affiliated Hospital of Xiamen University</institution>, <addr-line>Xiamen</addr-line>, <country>China</country></aff>
<aff id="aff5"><sup>5</sup><institution>Department of Neurosurgery, The First Affiliated Hospital of Xiamen University</institution>, <addr-line>Xiamen</addr-line>, <country>China</country></aff>
<aff id="aff6"><sup>6</sup><institution>College of Mechanical and Electrical Engineering, Wenzhou University</institution>, <addr-line>Wenzhou</addr-line>, <country>China</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Yuanpeng Zhang, Nantong University, China</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Wei Cao, Huaqiao University, China; Zhiguo Wan, Xi&#x2019;an Shiyou University, China; Wenrong Xiao, China Three Gorges University, China</p></fn>
<corresp id="c001">&#x002A;Correspondence: Bin Yao, <email>aeroiiet@126.com</email></corresp>
<corresp id="c002">Si-Fang Chen, <email>csfsong143@aliyun.com</email></corresp>
<corresp id="c003">Wei-Fang Sun, <email>swf@wzu.edu.cn</email></corresp>
<fn fn-type="other" id="fn004"><p>This article was submitted to Brain Imaging Methods, a section of the journal Frontiers in Neuroscience</p></fn>
</author-notes>
<pub-date pub-type="epub">
<day>17</day>
<month>11</month>
<year>2021</year>
</pub-date>
<pub-date pub-type="collection">
<year>2021</year>
</pub-date>
<volume>15</volume>
<elocation-id>774857</elocation-id>
<history>
<date date-type="received">
<day>13</day>
<month>09</month>
<year>2021</year>
</date>
<date date-type="accepted">
<day>22</day>
<month>10</month>
<year>2021</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2021 Huang, Liu, Yao, Wang, Chen and Sun.</copyright-statement>
<copyright-year>2021</copyright-year>
<copyright-holder>Huang, Liu, Yao, Wang, Chen and Sun</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license>
</permissions>
<abstract>
<p>The classification of electroencephalogram (EEG) signals is of significant importance in brain-computer interface (BCI) systems. Aiming to achieve intelligent classification of motor imagery EEG types with high accuracy, a classification methodology using the wavelet packet decomposition (WPD) and the proposed deep residual convolutional networks (DRes-CNN) is proposed. Firstly, EEG waveforms are segmented into sub-signals. Then the EEG signal features are obtained through the WPD algorithm, and some selected wavelet coefficients are retained and reconstructed into EEG signals in their respective frequency bands. Subsequently, the reconstructed EEG signals were utilized as input of the proposed deep residual convolutional networks to classify EEG signals. Finally, EEG types of motor imagination are classified by the DRes-CNN classifier intelligently. The datasets from BCI Competition were used to test the performance of the proposed deep learning classifier. Classification experiments show that the average recognition accuracy of this method reaches 98.76%. The proposed method can be further applied to the BCI system of motor imagination control.</p>
</abstract>
<kwd-group>
<kwd>electroencephalogram (EEG)</kwd>
<kwd>motor imagery (MI)</kwd>
<kwd>wavelet packet decomposition (WPD)</kwd>
<kwd>residual</kwd>
<kwd>convolutional neural networks</kwd>
</kwd-group>
<counts>
<fig-count count="4"/>
<table-count count="0"/>
<equation-count count="8"/>
<ref-count count="31"/>
<page-count count="8"/>
<word-count count="5628"/>
</counts>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="S1">
<title>Introduction</title>
<p>Electroencephalogram (EEG) is a common biological signal in the medical field. People obtain EEG signals by collecting and recording the potential changes of the superficial skin of the head, and characterize the activity characteristics of the brain. The research of EEG signals is widely used in various aspects. In the field of biomedicine, EEG signal researches help doctors diagnose neurological diseases, such as frostbite, epilepsy, Alzheimer&#x2019;s disease, childhood developmental disorders, schizophrenia, Parkinson&#x2019;s disease and other functional diseases. EEG can detect sleep quality, fatigue driving, and drunk driving. It can also study human brain functions such as emotion, cognition, memory, and sports. At present, a mainstream research direction is to use brain waves to control objects. Through certain processing of EEG signals, certain parts of the body can be controlled to make certain actions. At present, certain research results have been achieved. For example, people invented instruments and tools to facilitate the lives of the disabled. Motor imaging signal is a kind of brain electrical signal, which is often used in brain-computer interface (BCI).</p>
<p>Brain-computer interface technology is a system that does not pass through the normal physiological pathways of the human body, but allows the brain to directly transmit information or control commands to computers or related instruments (<xref ref-type="bibr" rid="B8">Gong, 2014</xref>). When the brain performs motor imagination, the corresponding brain regional potential will also change accordingly due to the different content of the imagination activity. The corresponding changes in the brain are detected, and the computer is used to convert these detected change signals into instructions to control the lower computer (<xref ref-type="bibr" rid="B7">Cilliers and Van Der Kouwe, 1993</xref>). The EEG signal is an important part of the BCI system. The acquisition of motor imagery EEG signals is the first step to realize the operation of the BCI system. Then the EEG signal is processed and decoded. Finally, the EEG signal is translated into &#x201C;machine language&#x201D; through the control instruction conversion module to drive the external equipment, so that the purpose of human-computer interaction can be realized.</p>
<p>Pattern recognition of various states of the human body based on brainwave detection is a very popular research topic, and it has produced quite constructive results in many fields. K. Polat proposed a classification method of epileptiform EEG using a hybrid system based on decision tree classifier and fast Fourier transform (<xref ref-type="bibr" rid="B13">Kemal and Salih, 2007</xref>). M. V. M. Yeo and X. P. Li used support vector machines for pattern recognition and developed a method to automatically detect the driver&#x2019;s fatigue driving state (<xref ref-type="bibr" rid="B18">Mervyn et al., 2009</xref>). The method recognition accuracy reached 99.3%, and it can reliably predict the transition from alertness to drowsiness. T. Nguyen proposed a threshold method to identify blinking state, and achieved good results in the detection results (<xref ref-type="bibr" rid="B19">Nguyen et al., 2013</xref>). Wang proposed an EEG eye state identification method using incremental attribute learning with time-series classification, and the method finally achieved an accuracy rate higher than average (<xref ref-type="bibr" rid="B27">Wang et al., 2014</xref>). S. K. Satapathy used neural network and support vector machine to perform brain wave-based pattern recognition for epilepsy and has achieved good recognition results (<xref ref-type="bibr" rid="B25">Satapathy et al., 2017</xref>). G. Anumanchipalli uses the RNN deep learning model to directly read the thoughts in the brains of paralyzed patients using a BCI. The spoken sentences can reach 150 words per minute, which is close to the normal level of people (<xref ref-type="bibr" rid="B1">Anumanchipalli et al., 2019</xref>).</p>
<p>Since the end of the 1960s, humans have studied BCI technology for more than 50 years. Since the 21st century, the research of BCI has become more and more prosperous. Four international BCI competitions were successfully held. Researchers have systematically analyzed the processing methods of EEG signals and produced some mature applications. The BCI laboratory team at the Cognitive Institute of Graz University of Technology in Austria first implemented a BCI based on online EEG classification. The team developed a variety of BCI systems using motor imaging brain electrical signals, including imagining different limb movements to control the movement of the wheelchair (<xref ref-type="bibr" rid="B20">Pfurtscheller et al., 1993</xref>, <xref ref-type="bibr" rid="B21">2006</xref>), and using brain waves to control the movement of the mouse to find coins (<xref ref-type="bibr" rid="B23">Pfurtscheller et al., 2000</xref>). The BCI Research Institute in Berlin has developed a typing system (<xref ref-type="bibr" rid="B3">Benjamin et al., 2003</xref>). The subjects selected different characters for typing by imagining the movements of the left hand, right hand, and foot. The Washington Research Center in the United States uses different EEG rhythm signals generated by motor imagination to realize the free movement of the virtual cursor in three-dimensional space (<xref ref-type="bibr" rid="B17">McFarland et al., 2010</xref>). The BSI-TOYOTA Collaboration Center in Japan has successfully developed a real-time control wheelchair using brain waves. By imagining the front, left, and right to control the direction of the wheelchair, a 125 ms response control system for the electric wheelchair can be realized (<xref ref-type="bibr" rid="B2">Bai, 2010</xref>). 
Gao Shangkai of Tsinghua University used the characteristics of motor imaginary EEG signals to develop a system that uses EEG signals to control robot dogs playing football (<xref ref-type="bibr" rid="B28">Wang et al., 2007</xref>). Xu Baoguo of Southeast University controlled the robot arm to make corresponding actions based on imagining the movement of the hand (<xref ref-type="bibr" rid="B29">Xu et al., 2011</xref>). The average accuracy of motor imaging EEG for manipulator control is 88%. Li Yuanqing of South China University of Technology designed a hybrid BCI system that combines motor-imaging EEG signals and P300 signals, which control the horizontal and vertical movement of the cursor, respectively (<xref ref-type="bibr" rid="B14">Li et al., 2010</xref>).</p>
<p>Although many laboratory results have been achieved in the research of BCI technology, there are still few products that can be applied in real life. The BCI technology is still in the stage of theoretical research and laboratory development, and the application system needs to be further improved. There are still many key technologies that need to be improved. Firstly, the existing BCI systems have poor adaptability. Different individuals have different physiological functions, so people&#x2019;s EEG physiological responses to the same task will also be different. When the same individual performs the same motor imagination activity in different mental states, the EEG response may also be different. The adaptability of the future BCI system should not only meet the differences between different individuals, but also meet the changes of different states of the same individual. Secondly, the recognition speed and accuracy of the BCI system need to be improved. Classification accuracy and recognition speed are the most commonly used performance evaluation indicators in BCI systems. The existing feature extraction methods used for classification of motion imaging tasks are relatively complicated. The large amount of data leads to long calculation time and slow system processing speed. However, reducing the amount of data or simplifying the signal feature extraction method will cause the classification accuracy to decrease. The key challenge of current research is to speed up the processing speed of the system while ensuring the accuracy of classification. In addition, it is relatively difficult to integrate technologies in different fields with BCI system. Promoting the technical integration of BCI application systems and being accepted by users is also an important practical problem faced by BCI systems.</p>
<p>In this article, we propose an accurate EEG signal classification method using Deep Residual Convolutional Neural Network (DRes-CNN). The EEG signals in BCI Competition 2005 data set IVa and BCI Competition 2003 data set III are selected as the original data. The wavelet packet decomposition (WPD) was used for preprocessing to obtain the characteristics of EEG signals. Subsequently, the reconstructed EEG signals of different frequency bands were used as the input of DRes-CNN to finally identify and classify the EEG types. The classification results show that the average accuracy of the proposed DRes-CNN model can reach 98.76%. The rest of this article is organized as follows. In section &#x201C;Method,&#x201D; we explained the methods used for EEG classification, including database and segmentation, data preprocessing based on WPD, and the proposed deep residual neural network. In section &#x201C;Results,&#x201D; the numerical evaluation and experimental results of the EEG classification are shown. Finally, we give the discussion and conclusion in section &#x201C;Discussion.&#x201D;</p>
</sec>
<sec id="S2">
<title>Method</title>
<sec id="S2.SS1">
<title>Methodology Overview</title>
<p>The proposed EEG classification method is based on the WPD and the proposed deep residual convolutional networks. The original EEG signals were shared by the BCI Competition database (<xref ref-type="bibr" rid="B4">Blankertz et al., 2006</xref>). Firstly, EEG waveforms are segmented into sub-signals. Then the EEG signal features are obtained through the WPD algorithm, and some selected wavelet coefficients are retained and reconstructed into EEG signals in their respective frequency bands. Subsequently, the reconstructed EEG signals were utilized as input of the proposed deep residual convolutional networks to classify EEG signals. Finally, EEG types of motor imagination are classified by the DRes-CNN classifier intelligently.</p>
</sec>
<sec id="S2.SS2">
<title>Database and Segmentation</title>
<p>The international organizations have held several BCI competitions since 2001. The International BCI Competition provides a reliable data source and a unified test standard for researchers in the field of motor imaging EEG signal analysis. The experimental data in this article comes from the databases in BCI Competition 2005 (dataset IVa) and BCI Competition 2003 (dataset III).</p>
<p>These two databases contain data sets recorded by five subjects (aa, al, av, aw, and ay). All five subjects performed the BCI experiment, which included three exercises of motor imagination for the left hand, right hand, and right foot. In this experiment, only the right hand (R) and right foot (F) two types of motor imagination are used for data analysis, and they are named Class-1 and Class-2. Each EEG signal has 118 channels. These motor imaging tasks are classified by using the EEG signals recorded on the C3, Cz, and C4 channels. At the beginning of the experiment, a prompt appeared in the center of the screen to inform the subject of the motion imaging task to be performed. Each test takes 7 s. During the first 2 s, the subject remained sitting still. At <italic>t</italic> = 2 s, an auditory stimulus will appear, prompting the start of the experiment. At <italic>t</italic> = 3 s, an arrow will appear on the screen to indicate which imaginary exercise the subject is performing. At the same time, the subject began to perform an imaginary movement in the same direction as the arrow prompts. The subject&#x2019;s imagination time is 3.5 s. After the motion imaging, the subjects had a short rest period, which ranged from 1.75 to 2.25 s. At <italic>t</italic> = 7 s, the arrow disappears and the subject ends the imaginary action. The sampling frequency of EEG is 250 Hz. The EEG waveform is divided into time samples of 3.5 s. There are 140 experimental samples for each type of EEG signal.</p>
</sec>
<sec id="S2.SS3">
<title>Data Preprocessing <italic>via</italic> Wavelet Packet Decomposition</title>
<p>Electroencephalogram signals have time-varying and non-stationary characteristics. Time domain analysis mainly considers the geometric characteristics of signal variance and mean value, and frequency domain analysis mainly considers the characteristics of signal coherence and frequency band power. EEG signals are constantly changing with time. Neither time domain analysis nor frequency domain analysis alone can accurately reflect its characteristics. Time-frequency joint analysis is more suitable for reflecting the transient characteristics of non-stationary signals.</p>
<p>Wavelet packet decomposition (<xref ref-type="bibr" rid="B26">Walczak and Massart, 1997</xref>; <xref ref-type="bibr" rid="B16">Manthalkar et al., 2003</xref>) is an improved method based on wavelet decomposition. This method makes up for the low resolution of the high-frequency part of wavelet decomposition. It can analyze the signal more accurately. For different signals, WPD can automatically select the appropriate frequency band to match the frequency spectrum of the signal, thereby improving the time-frequency resolution. WPD has a good performance in signal local analysis. It can effectively remove the redundant information and retain the feature information that is beneficial to classification to best express the EEG signal feature information.</p>
<p>In multi-resolution analysis, WPD is regarded as a process of stepwise orthogonal decomposition of a function space. Multi-resolution analysis decomposes the Hilbert space L<sup>2</sup>(R) into the orthogonal sum of all wavelet subspaces <bold>W<sub>l</sub></bold> according to different scale factors <bold>l</bold>. A new subspace <inline-formula><mml:math id="INEQ4"><mml:msubsup><mml:mtext mathvariant="bold">U</mml:mtext><mml:mi mathvariant="bold">l</mml:mi><mml:mi mathvariant="bold">m</mml:mi></mml:msubsup></mml:math></inline-formula> is defined to represent the wavelet subspace <bold>W<sub>l</sub></bold> and the scale space <bold>V<sub>l</sub></bold>.</p>
<disp-formula id="S2.E1"><label>(1)</label><mml:math id="M1" display="block"><mml:mrow><mml:mo>{</mml:mo><mml:mtable displaystyle="true" rowspacing="0pt"><mml:mtr><mml:mtd columnalign="center"><mml:mrow><mml:mrow><mml:msubsup><mml:mtext mathvariant="bold-italic">U</mml:mtext><mml:mi mathvariant="bold">l</mml:mi><mml:mn>0</mml:mn></mml:msubsup><mml:mo>=</mml:mo><mml:msub><mml:mtext mathvariant="bold-italic">V</mml:mtext><mml:mi mathvariant="bold">l</mml:mi></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mtext mathvariant="bold-italic">l</mml:mtext><mml:mo>&#x2208;</mml:mo><mml:mtext mathvariant="bold-italic">Z</mml:mtext></mml:mrow></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd columnalign="center"><mml:mrow><mml:mrow><mml:msubsup><mml:mtext mathvariant="bold-italic">U</mml:mtext><mml:mi mathvariant="bold">l</mml:mi><mml:mn mathvariant="bold">1</mml:mn></mml:msubsup><mml:mo>=</mml:mo><mml:msub><mml:mtext mathvariant="bold-italic">W</mml:mtext><mml:mi mathvariant="bold">l</mml:mi></mml:msub></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mtext mathvariant="bold-italic">l</mml:mtext><mml:mo>&#x2208;</mml:mo><mml:mtext mathvariant="bold-italic">Z</mml:mtext></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable><mml:mi/></mml:mrow></mml:math></disp-formula>
<p>The orthogonal decomposition of Hilbert space <bold>V<sub>l</sub></bold>&#x2295;<bold>W<sub>l</sub></bold> can be expressed as:</p>
<disp-formula id="S2.E2"><label>(2)</label><mml:math id="M2" display="block"><mml:mrow><mml:mrow><mml:msubsup><mml:mtext mathvariant="bold-italic">U</mml:mtext><mml:mrow><mml:mi mathvariant="bold">l</mml:mi><mml:mo>+</mml:mo><mml:mn mathvariant="bold">1</mml:mn></mml:mrow><mml:mn mathvariant="bold">0</mml:mn></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:msubsup><mml:mtext mathvariant="bold-italic">U</mml:mtext><mml:mi mathvariant="bold">l</mml:mi><mml:mn mathvariant="bold">0</mml:mn></mml:msubsup><mml:mo>&#x2295;</mml:mo><mml:msubsup><mml:mtext mathvariant="bold-italic">U</mml:mtext><mml:mi mathvariant="bold">l</mml:mi><mml:mn mathvariant="bold">1</mml:mn></mml:msubsup></mml:mrow></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mtext mathvariant="bold-italic">l</mml:mtext><mml:mo>&#x2208;</mml:mo><mml:mtext mathvariant="bold-italic">Z</mml:mtext></mml:mrow><mml:mo>.</mml:mo></mml:math></disp-formula>
<p>Define the subspace <inline-formula><mml:math id="INEQ8"><mml:msubsup><mml:mtext mathvariant="bold-italic">U</mml:mtext><mml:mi mathvariant="bold">l</mml:mi><mml:mi mathvariant="bold">m</mml:mi></mml:msubsup></mml:math></inline-formula> as a closure space of the function <bold>u<sub>m</sub></bold>(<bold>t</bold>), so that <bold>u<sub>m</sub></bold>(<bold>t</bold>) satisfies:</p>
<disp-formula id="S2.E3"><label>(3)</label><mml:math id="M3" display="block"><mml:mrow><mml:mrow><mml:mo>{</mml:mo><mml:mtable displaystyle="true" rowspacing="0pt"><mml:mtr><mml:mtd columnalign="center"><mml:mrow><mml:mrow><mml:msub><mml:mtext mathvariant="bold-italic">u</mml:mtext><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mi mathvariant="bold">m</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:msqrt><mml:mn mathvariant="bold">2</mml:mn></mml:msqrt><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mstyle displaystyle="false"><mml:msub><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi mathvariant="bold">k</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi mathvariant="bold">Z</mml:mi></mml:mrow></mml:msub></mml:mstyle><mml:mrow><mml:mtext mathvariant="bold-italic">h</mml:mtext><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">k</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2062;</mml:mo><mml:msub><mml:mtext mathvariant="bold-italic">u</mml:mtext><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mi mathvariant="bold">m</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mtext mathvariant="bold-italic">t</mml:mtext></mml:mrow><mml:mo>-</mml:mo><mml:mtext mathvariant="bold-italic">k</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mrow></mml:mrow></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd columnalign="center"><mml:mrow><mml:mrow><mml:msub><mml:mtext mathvariant="bold-italic">u</mml:mtext><mml:mrow><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mi mathvariant="bold">m</mml:mi></mml:mrow><mml:mo>+</mml:mo><mml:mn 
mathvariant="bold">1</mml:mn></mml:mrow></mml:msub><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:msqrt><mml:mn mathvariant="bold">2</mml:mn></mml:msqrt><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mstyle displaystyle="false"><mml:msub><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi mathvariant="bold">k</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi mathvariant="bold">Z</mml:mi></mml:mrow></mml:msub></mml:mstyle><mml:mrow><mml:mtext mathvariant="bold-italic">g</mml:mtext><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">k</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2062;</mml:mo><mml:msub><mml:mtext mathvariant="bold-italic">u</mml:mtext><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mi mathvariant="bold">m</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mtext mathvariant="bold-italic">t</mml:mtext></mml:mrow><mml:mo>-</mml:mo><mml:mtext mathvariant="bold-italic">k</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mrow></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable><mml:mi/></mml:mrow><mml:mo rspace="5.3pt">,</mml:mo></mml:mrow></mml:math></disp-formula>
<p>where <bold>g</bold>(<bold>k</bold>) = (&#x2212;<bold>1</bold>)<sup><bold>k</bold></sup><bold>h</bold>(<bold>1</bold>&#x2212;<bold>k</bold>), <bold>g</bold>(<bold>k</bold>), and <bold>h</bold>(<bold>k</bold>) are the coefficients of the high-pass filter and the low-pass filter, which are orthogonal to each other.</p>
<p>When <italic>m</italic> = 0, from Equation 3, we can get:</p>
<disp-formula id="S2.E4"><label>(4)</label><mml:math id="M4" display="block"><mml:mrow><mml:mrow><mml:mo>{</mml:mo><mml:mtable displaystyle="true" rowspacing="0pt"><mml:mtr><mml:mtd columnalign="center"><mml:mrow><mml:mrow><mml:msub><mml:mtext mathvariant="bold-italic">u</mml:mtext><mml:mn mathvariant="bold">0</mml:mn></mml:msub><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mstyle displaystyle="false"><mml:msub><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi mathvariant="bold">k</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi mathvariant="bold">Z</mml:mi></mml:mrow></mml:msub></mml:mstyle><mml:mrow><mml:mtext mathvariant="bold-italic">h</mml:mtext><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">k</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2062;</mml:mo><mml:msub><mml:mtext mathvariant="bold-italic">u</mml:mtext><mml:mn mathvariant="bold">0</mml:mn></mml:msub><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mtext mathvariant="bold-italic">t</mml:mtext></mml:mrow><mml:mo>-</mml:mo><mml:mtext mathvariant="bold-italic">k</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mrow></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd columnalign="center"><mml:mrow><mml:mrow><mml:msub><mml:mtext mathvariant="bold-italic">u</mml:mtext><mml:mn mathvariant="bold">1</mml:mn></mml:msub><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">t</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mstyle displaystyle="false"><mml:msub><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi mathvariant="bold">k</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi mathvariant="bold">Z</mml:mi></mml:mrow></mml:msub></mml:mstyle><mml:mrow><mml:mtext 
mathvariant="bold-italic">g</mml:mtext><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">k</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2062;</mml:mo><mml:msub><mml:mtext mathvariant="bold-italic">u</mml:mtext><mml:mn mathvariant="bold">0</mml:mn></mml:msub><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mtext mathvariant="bold-italic">t</mml:mtext></mml:mrow><mml:mo>-</mml:mo><mml:mtext mathvariant="bold-italic">k</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable><mml:mi/></mml:mrow><mml:mo rspace="5.3pt">.</mml:mo></mml:mrow></mml:math></disp-formula>
<p>In the process of multi-resolution analysis, the wavelet basis function <italic>&#x03C6;</italic>(<italic>t</italic>) and scale function <italic>&#x03C8;</italic>(<italic>t</italic>) satisfy:</p>
<disp-formula id="S2.E5"><label>(5)</label><mml:math id="M5" display="block"><mml:mrow><mml:mrow><mml:mo>{</mml:mo><mml:mtable displaystyle="true" rowspacing="0pt"><mml:mtr><mml:mtd columnalign="center"><mml:mrow><mml:mrow><mml:mpadded lspace="2.8pt" width="+2.8pt"><mml:mi>&#x03C6;</mml:mi></mml:mpadded><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mstyle displaystyle="false"><mml:msub><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi mathvariant="bold">k</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi mathvariant="bold">Z</mml:mi></mml:mrow></mml:msub></mml:mstyle><mml:mrow><mml:mtext mathvariant="bold-italic">g</mml:mtext><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">k</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2062;</mml:mo><mml:mi>&#x03C8;</mml:mi><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mtext mathvariant="bold-italic">t</mml:mtext></mml:mrow><mml:mo>-</mml:mo><mml:mtext mathvariant="bold-italic">k</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mrow></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd columnalign="center"><mml:mrow><mml:mrow><mml:mi>&#x03C8;</mml:mi><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mstyle displaystyle="false"><mml:msub><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi mathvariant="bold">k</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi mathvariant="bold">Z</mml:mi></mml:mrow></mml:msub></mml:mstyle><mml:mrow><mml:mtext mathvariant="bold-italic">h</mml:mtext><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi 
mathvariant="bold">k</mml:mi><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2062;</mml:mo><mml:mi>&#x03C8;</mml:mi><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mtext mathvariant="bold-italic">t</mml:mtext></mml:mrow><mml:mo>-</mml:mo><mml:mtext mathvariant="bold-italic">k</mml:mtext></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:mrow></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable><mml:mi/></mml:mrow><mml:mo rspace="5.3pt">.</mml:mo></mml:mrow></mml:math></disp-formula>
<p>From Equations 4 and 5, we can know that <italic>&#x03C6;</italic>(<italic>t</italic>) = <italic>u</italic><sub>1</sub>(<italic>t</italic>) and <italic>&#x03C8;</italic>(<italic>t</italic>) = <italic>u</italic><sub>0</sub>(<italic>t</italic>). Therefore {<bold>u<sub>m</sub></bold>(<bold>t</bold>)}<sub><bold><italic>m</italic></bold>&#x2208;<bold><italic>Z</italic></bold></sub> is an orthogonal wavelet packet. The calculation formula of the WPD coefficient is shown in Equation 6:</p>
<disp-formula id="S2.E6"><label>(6)</label><mml:math id="M6" display="block"><mml:mrow><mml:mrow><mml:mo>{</mml:mo><mml:mpadded width="+2.8pt"><mml:mtable displaystyle="true" rowspacing="0pt"><mml:mtr><mml:mtd columnalign="center"><mml:mrow><mml:msubsup><mml:mi>d</mml:mi><mml:mi>i</mml:mi><mml:mrow><mml:mrow><mml:mi>l</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mn>2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mi>m</mml:mi></mml:mrow></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:mstyle displaystyle="false"><mml:msub><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi mathvariant="bold">k</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi mathvariant="bold">Z</mml:mi></mml:mrow></mml:msub></mml:mstyle><mml:mrow><mml:mtext mathvariant="bold-italic">h</mml:mtext><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext mathvariant="bold-italic">k</mml:mtext><mml:mo>-</mml:mo><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mtext mathvariant="bold-italic">i</mml:mtext></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2062;</mml:mo><mml:msubsup><mml:mtext mathvariant="bold-italic">d</mml:mtext><mml:mi mathvariant="bold">k</mml:mi><mml:mrow><mml:mi mathvariant="bold">l</mml:mi><mml:mo>,</mml:mo><mml:mi mathvariant="bold">m</mml:mi></mml:mrow></mml:msubsup></mml:mrow></mml:mrow></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd columnalign="center"><mml:mrow><mml:msubsup><mml:mtext mathvariant="bold-italic">d</mml:mtext><mml:mi mathvariant="bold">i</mml:mi><mml:mrow><mml:mrow><mml:mi mathvariant="bold">l</mml:mi><mml:mo>+</mml:mo><mml:mn mathvariant="bold">1</mml:mn></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mi mathvariant="bold">m</mml:mi></mml:mrow><mml:mo>+</mml:mo><mml:mn mathvariant="bold">1</mml:mn></mml:mrow></mml:mrow></mml:msubsup><mml:mo>=</mml:mo><mml:mrow><mml:mstyle 
displaystyle="false"><mml:msub><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi mathvariant="bold">k</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi mathvariant="bold">Z</mml:mi></mml:mrow></mml:msub></mml:mstyle><mml:mrow><mml:mtext mathvariant="bold-italic">g</mml:mtext><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mtext mathvariant="bold-italic">k</mml:mtext><mml:mo>-</mml:mo><mml:mrow><mml:mn mathvariant="bold">2</mml:mn><mml:mo>&#x2062;</mml:mo><mml:mtext mathvariant="bold-italic">i</mml:mtext></mml:mrow></mml:mrow><mml:mo>)</mml:mo></mml:mrow><mml:mo>&#x2062;</mml:mo><mml:msubsup><mml:mtext mathvariant="bold-italic">d</mml:mtext><mml:mi mathvariant="bold">k</mml:mi><mml:mrow><mml:mi mathvariant="bold">l</mml:mi><mml:mo>,</mml:mo><mml:mi mathvariant="bold">m</mml:mi></mml:mrow></mml:msubsup></mml:mrow></mml:mrow></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mpadded><mml:mi/></mml:mrow><mml:mo>.</mml:mo></mml:mrow></mml:math></disp-formula>
<p>The WPD has good time-frequency resolution in both high-frequency and low-frequency parts. This method saves all the energy of the signal, so it is very suitable for the analysis and processing of EEG signals. Since the information of the EEG signal reflected by the wavelet packet coefficients on each decomposition scale is different, it can be considered to extract features from part of the wavelet packet coefficients. In the process of WPD, the decomposition scale and basis function have a great influence on the decomposition effect. The higher the scale of WPD, the better its local characteristics. But the dimensionality of the feature is also larger, which will prolong the training time of the model. Studies have confirmed that the ERD/ERS phenomenon that characterizes motor imagination actions is mainly reflected in the 8&#x2013;30 Hz of the EEG signal (<xref ref-type="bibr" rid="B22">Pfurtscheller et al., 1997</xref>). Therefore, the decomposition scale should ensure that the frequency band corresponding to the wavelet packet coefficient is within the frequency range. Therefore, the decomposition level of WPD is determined to be four. The four-layer WPD of the EEG signal is shown in <xref ref-type="fig" rid="F1">Figure 1</xref>. The signal is divided into 16 frequency bands, and the frequency range corresponding to each node in each layer is shown in <xref ref-type="fig" rid="F1">Figure 1</xref>.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption><p>Four-layer wavelet packet decomposition of EEG signal.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnins-15-774857-g001.tif"/>
</fig>
<p>According to the characteristics of the original EEG signal and the wavelet basis functions, the selected wavelet basis is Db4 (<xref ref-type="bibr" rid="B9">Guan et al., 2015</xref>). The original EEG is decomposed by wavelet packet to obtain wavelet coefficients on various scales. The wavelet coefficients of the 0th, 1st, 2nd, and 3rd nodes in the fourth-level decomposition are retained and reconstructed into EEG signals in their respective frequency bands. The reconstructed EEG signals filter out high-frequency noise in the original signal and signals in other frequency bands that are not related to motor imagination. The reconstructed EEG signals of <inline-formula><mml:math id="INEQ19"><mml:msubsup><mml:mtext mathvariant="bold-italic">S</mml:mtext><mml:mn mathvariant="bold">4</mml:mn><mml:mn mathvariant="bold">0</mml:mn></mml:msubsup></mml:math></inline-formula>, <inline-formula><mml:math id="INEQ20"><mml:msubsup><mml:mtext mathvariant="bold-italic">S</mml:mtext><mml:mn mathvariant="bold">4</mml:mn><mml:mn mathvariant="bold">1</mml:mn></mml:msubsup></mml:math></inline-formula>, <inline-formula><mml:math id="INEQ21"><mml:msubsup><mml:mtext mathvariant="bold-italic">S</mml:mtext><mml:mn mathvariant="bold">4</mml:mn><mml:mn mathvariant="bold">2</mml:mn></mml:msubsup></mml:math></inline-formula>, and <inline-formula><mml:math id="INEQ22"><mml:msubsup><mml:mtext mathvariant="bold-italic">S</mml:mtext><mml:mn mathvariant="bold">4</mml:mn><mml:mn mathvariant="bold">3</mml:mn></mml:msubsup></mml:math></inline-formula> are used as the input of the EEG classifier. The examples of short EEG recordings and their reconstructed sub recordings are shown in <xref ref-type="fig" rid="F2">Figure 2</xref>.</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption><p>Examples of short EEG recordings and their reconstructed sub recordings.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnins-15-774857-g002.tif"/>
</fig>
</sec>
<sec id="S2.SS4">
<title>The Proposed Deep Residual Convolutional Networks</title>
<p>Convolutional neural network (CNN) is a deep feedforward neural network (<xref ref-type="bibr" rid="B15">Liu, 2018</xref>) inspired by the concept of &#x201C;receptive field.&#x201D; With the increase of the number of layers and neurons in the deep neural network, the non-linear fitting ability will increase. CNN is widely used in engineering fault diagnosis (<xref ref-type="bibr" rid="B5">Cao et al., 2019</xref>), medical signal recognition (<xref ref-type="bibr" rid="B6">Chandra et al., 2019</xref>; <xref ref-type="bibr" rid="B11">Huang et al., 2019</xref>), image recognition (<xref ref-type="bibr" rid="B24">Qu et al., 2016</xref>) and other fields. However, simply stacking the number of network layers will cause the problem of vanishing gradients. The network can be converged by normalizing initialization and introducing an intermediate normalization layer. But for a deeper network, the accuracy of the model will decrease as the depth increases when the network model accuracy reaches saturation. This is the degradation problem of neural networks (<xref ref-type="bibr" rid="B10">He et al., 2016</xref>; <xref ref-type="bibr" rid="B31">Yu et al., 2016</xref>). The neural network learns an implicit abstract mapping relationship by adjusting its parameters. However, this implicit mapping relationship is difficult to optimize in a deeper network. The purpose of the deep residual convolutional neural network (DRes-CNN) method is to solve the degradation problem of traditional neural networks. The learning process of the DRes-CNN is using multiple consecutively stacked non-linear computing layers to fit the residual F(x) between the input data and the mapped output data. The residual F(x) is calculated as follows:</p>
<disp-formula id="S2.E7"><label>(7)</label><mml:math id="M7" display="block"><mml:mrow><mml:mrow><mml:mrow><mml:mtext mathvariant="bold-italic">F</mml:mtext><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">x</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mrow><mml:mtext mathvariant="bold-italic">H</mml:mtext><mml:mo>&#x2062;</mml:mo><mml:mrow><mml:mo>(</mml:mo><mml:mi mathvariant="bold">x</mml:mi><mml:mo>)</mml:mo></mml:mrow></mml:mrow><mml:mo>-</mml:mo><mml:mtext mathvariant="bold-italic">x</mml:mtext></mml:mrow></mml:mrow><mml:mo>,</mml:mo></mml:mrow></mml:math></disp-formula>
<p>where H(x) is the optimal solution, and x is the identity mapping of the input.</p>
<p>The closer the residual F(x) is to 0, the closer the features extracted by this network are to the original input. The DRes-CNN composed of the residual block local units can solve the difficulty in convergence and adjustment. It overcomes the degradation problem of CNN as the number of network layers increases.</p>
<p>In this section, we propose the deep residual convolutional neural networks (DRes-CNN). As shown in <xref ref-type="fig" rid="F3">Figure 3</xref>, the DRes-CNN is mainly composed of four residual convolution modules and a classification module. In the proposed deep residual convolutional networks, a convolutional layer with a stride of 3, a random dropout layer and a batch-normalization layer are first applied to compress the input EEG data and enhance the generalization of the DRes-CNN model. The down-sampling module can effectively simplify the calculation of deep network models, reduce data redundancy, and promote model learning (<xref ref-type="bibr" rid="B12">Huang et al., 2020</xref>). In the four residual convolution modules, convolutional layers in series are followed by a residual shortcut connection. Then a random dropout layer and the max-pooling layer are added after the convolutional layers. Finally, in the classification module, a flatten layer follows the convolution layer and a random dropout layer is applied after the flatten layer to prevent overfitting. In the proposed DRes-CNN model, the learning rate is set as 0.001 and the batch size parameter is set as 250.</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption><p>The architecture of the proposed DRes-CNN.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnins-15-774857-g003.tif"/>
</fig>
</sec>
</sec>
<sec sec-type="results" id="S3">
<title>Results</title>
<sec id="S3.SS1">
<title>Evaluation Metrics</title>
<p>The performance of the classification model is mainly measured by the accuracy. The accuracy was calculated through Equation 8.</p>
<disp-formula id="S3.E8"><label>(8)</label><mml:math id="M8" display="block"><mml:mrow><mml:mtext mathvariant="bold-italic">Accuracy</mml:mtext><mml:mrow><mml:mo>(</mml:mo><mml:mo>%</mml:mo><mml:mo>)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mtext mathvariant="bold-italic">TP</mml:mtext><mml:mo>+</mml:mo><mml:mtext mathvariant="bold-italic">TN</mml:mtext></mml:mrow><mml:mrow><mml:mtext mathvariant="bold-italic">TP</mml:mtext><mml:mo>+</mml:mo><mml:mtext mathvariant="bold-italic">TN</mml:mtext><mml:mo>+</mml:mo><mml:mtext mathvariant="bold-italic">FP</mml:mtext><mml:mo>+</mml:mo><mml:mtext mathvariant="bold-italic">FN</mml:mtext></mml:mrow></mml:mfrac><mml:mo>&#x00D7;</mml:mo><mml:mpadded width="+2.8pt"><mml:mn mathvariant="bold">100</mml:mn></mml:mpadded><mml:mo>,</mml:mo></mml:mrow></mml:math></disp-formula>
<p>where TP stands for true positive, meaning the correct classification as Class-1 of EEG; TN stands for true negative, meaning correct classification as Class-2 of EEG; FP stands for false positive, meaning incorrect classification as Class-1 of EEG; FN represents false negative, meaning incorrect classification as Class-2 of EEG (<xref ref-type="bibr" rid="B30">Yin et al., 2016</xref>).</p>
</sec>
<sec id="S3.SS2">
<title>The Experimental Classification Results</title>
<p>In order to verify the effectiveness of the proposed EEG classification model, we classify the EEG signals of the right hand (Class-1) and right foot (Class-2). One hundred forty groups of eight feature inputs can be obtained for each type of EEG signal after data preprocessing based on WPD. All EEG training sample data is randomly scrambled, and the last 200 samples are selected as the test set. The classification of EEG signals is based on the classification algorithm described in section &#x201C;Method.&#x201D;</p>
<p>The original EEG waveform is divided into sub-signals. Then, the characteristics of the EEG signal are obtained through the WPD algorithm. The specific wavelet coefficients are retained and reconstructed into the EEG signals of respective frequency bands. Subsequently, the reconstructed EEG signal is used as the input of the proposed deep residual convolutional network to complete the classification of EEG signals. The experiment was run on a PC with 32 GB of memory and 16 GB of GPU memory.</p>
<p>Electroencephalogram signals of different frequency bands contain different characteristics and information. The wavelet packet can decompose the information of each frequency band. It makes the characteristics of the EEG signal easier to identify in each frequency band. In this section, the performance of training the DRes-CNN with reconstructed sub-signals as model input is studied experimentally, and the experiment results are shown in <xref ref-type="fig" rid="F4">Figure 4</xref>.</p>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption><p>Comparison of average test accuracy by different reconstructed sub-signal.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnins-15-774857-g004.tif"/>
</fig>
<p>From <xref ref-type="fig" rid="F4">Figure 4</xref>, we can find that the accuracy of the classification test using the original EEG signal as input reaches 91.56%. The reconstructed EEG data set of <inline-formula><mml:math id="INEQ28"><mml:msubsup><mml:mi>S</mml:mi><mml:mn>4</mml:mn><mml:mn>0</mml:mn></mml:msubsup></mml:math></inline-formula> (0, 7.8125 Hz) reached an average test accuracy of 89.56%. The reconstructed EEG data set of <inline-formula><mml:math id="INEQ29"><mml:msubsup><mml:mi>S</mml:mi><mml:mn>4</mml:mn><mml:mn>1</mml:mn></mml:msubsup></mml:math></inline-formula> (7.8125, 15.625 Hz) reached the best average test accuracy of 98.76%. The reconstructed EEG data set of <inline-formula><mml:math id="INEQ30"><mml:msubsup><mml:mi>S</mml:mi><mml:mn>4</mml:mn><mml:mn>2</mml:mn></mml:msubsup></mml:math></inline-formula> (15.625, 23.4375 Hz) reached an average test accuracy of 93.54%. The reconstructed EEG data set of <inline-formula><mml:math id="INEQ31"><mml:msubsup><mml:mi>S</mml:mi><mml:mn>4</mml:mn><mml:mn>3</mml:mn></mml:msubsup></mml:math></inline-formula> (23.4375, 31.25 Hz) reached an average test accuracy of 95.95%. From the experimental comparison demonstrated above, we can conclude that the proposed DRes-CNN model shows the best classification performance when the reconstructed EEG dataset of <inline-formula><mml:math id="INEQ32"><mml:msubsup><mml:mi>S</mml:mi><mml:mn>4</mml:mn><mml:mn>1</mml:mn></mml:msubsup></mml:math></inline-formula> (7.8125, 15.625 Hz) is used as model input.</p>
</sec>
</sec>
<sec sec-type="discussion" id="S4">
<title>Discussion</title>
<p>In this article, we proposed an EEG classification method using WPD and the proposed deep residual convolutional network. The goal of this method is to achieve high-precision intelligent classification of motor-imaging EEG signals. The original EEG signal is shared by the BCI Competition database. Firstly, the EEG waveform is divided into shorter sub-signals. Then, the characteristics of the EEG signal are obtained through the WPD algorithm. Some selected wavelet coefficients are retained and reconstructed into EEG signals of their respective frequency bands. Subsequently, the reconstructed EEG signal is used as the input of the proposed deep residual convolutional network. Finally, the motor imagery EEG signals are intelligently classified by the DRes-CNN classifier. We compared the classification performance of reconstructed signals in different frequency bands as input to the model. Through comparative experiments, we found that the proposed DRes-CNN model shows the best classification accuracy of 98.76% when the reconstructed EEG data set in the frequency band of (7.8125, 15.625 Hz) is used as the model input. The proposed method can be further applied to the BCI system of motor imagination control.</p>
</sec>
<sec sec-type="data-availability" id="S5">
<title>Data Availability Statement</title>
<p>The original contributions presented in the study are included in the article/supplementary material, further inquiries can be directed to the corresponding authors.</p>
</sec>
<sec id="S6">
<title>Author Contributions</title>
<p>J-SH, W-SL, and BY conceived and designed the classification method. Z-XW and S-FC performed the experiment. J-SH preprocessed and analyzed the data and wrote the manuscript. BY, S-FC, and W-FS reviewed and edited the manuscript. J-SH and W-FS responded to the comments of the reviewers. All authors read and approved the manuscript.</p>
</sec>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of Interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="pudiscl1">
<title>Publisher&#x2019;s Note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
</body>
<back>
<sec sec-type="funding-information" id="S7">
<title>Funding</title>
<p>This research was supported financially by the National Natural Science Foundation of China (No. 51605403), the Fundamental Research Funds for the Central Universities under Grant 20720190009, International Science and Technology Cooperation Project of Fujian Province of China under Grant 2019I0003, China Aviation Engine Corporation&#x2019;s 2019 Industry-University-Research Cooperation Project Funding Project (No. HFZL2019CXY02), Natural Science Foundation of Fujian Province (2020J02063), and Xiamen Science and Technology Bureau Foundation of Science and Technology Project for Medical and Healthy (3502Z20209005).</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Anumanchipalli</surname> <given-names>G.</given-names></name> <name><surname>Chartier</surname> <given-names>J.</given-names></name> <name><surname>Chang</surname> <given-names>E.</given-names></name></person-group> (<year>2019</year>). <article-title>Speech synthesis from neural decoding of spoken sentences.</article-title> <source><italic>Nature</italic></source> <volume>568</volume> <fpage>493</fpage>&#x2013;<lpage>498</lpage>. <pub-id pub-id-type="doi">10.1038/s41586-019-1119-1</pub-id> <pub-id pub-id-type="pmid">31019317</pub-id></citation></ref>
<ref id="B2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bai</surname> <given-names>X.</given-names></name></person-group> (<year>2010</year>). <article-title>Real-time control of electric wheelchairs by brain waves.</article-title> <source><italic>Robot Technol. Appl.</italic></source> <volume>2</volume> <fpage>10</fpage>&#x2013;<lpage>12</lpage>.</citation></ref>
<ref id="B3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Benjamin</surname> <given-names>B.</given-names></name> <name><surname>Guido</surname> <given-names>D.</given-names></name> <name><surname>Matthias</surname> <given-names>K.</given-names></name> <name><surname>Schroder</surname> <given-names>M.</given-names></name> <name><surname>Williamson</surname> <given-names>J.</given-names></name> <name><surname>Murray-Smit</surname> <given-names>R.</given-names></name><etal/></person-group> (<year>2003</year>). <article-title>The Berlin brain-computer interface presents the novel mental typewriter Hex-O-Spell.</article-title> <source><italic>Comput. Intell. Neurosci.</italic></source> <volume>14</volume> <fpage>332</fpage>&#x2013;<lpage>336</lpage>.</citation></ref>
<ref id="B4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Blankertz</surname> <given-names>B.</given-names></name> <name><surname>Muller</surname> <given-names>K. R.</given-names></name> <name><surname>Krusienski</surname> <given-names>D. J.</given-names></name> <name><surname>Schalk</surname> <given-names>G.</given-names></name> <name><surname>Wolpaw</surname> <given-names>J. R.</given-names></name> <name><surname>Schl&#x00F6;gl</surname> <given-names>A.</given-names></name><etal/></person-group> (<year>2006</year>). <article-title>The BCI competition III: validating alternative approaches to actual BCI problems.</article-title> <source><italic>IEEE Trans. Neural Syst. Rehabil. Eng.</italic></source> <volume>14</volume> <fpage>153</fpage>&#x2013;<lpage>159</lpage>. <pub-id pub-id-type="doi">10.1109/TNSRE.2006.875642</pub-id> <pub-id pub-id-type="pmid">16792282</pub-id></citation></ref>
<ref id="B5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cao</surname> <given-names>X. C.</given-names></name> <name><surname>Chen</surname> <given-names>B. Q.</given-names></name> <name><surname>Yao</surname> <given-names>B.</given-names></name> <name><surname>He</surname> <given-names>W.-P.</given-names></name></person-group> (<year>2019</year>). <article-title>Combining translation-invariant wavelet frames and convolutional neural network for intelligent tool wear state identification.</article-title> <source><italic>Comput. Ind.</italic></source> <volume>106</volume> <fpage>71</fpage>&#x2013;<lpage>84</lpage>. <pub-id pub-id-type="doi">10.1016/j.compind.2018.12.018</pub-id></citation></ref>
<ref id="B6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chandra</surname> <given-names>B. S.</given-names></name> <name><surname>Sastry</surname> <given-names>C. S.</given-names></name> <name><surname>Jana</surname> <given-names>S.</given-names></name></person-group> (<year>2019</year>). <article-title>Robust heartbeat detection from multimodal data via CNN-Based generalizable information fusion.</article-title> <source><italic>IEEE Trans. Biomed. Eng.</italic></source> <volume>66</volume> <fpage>710</fpage>&#x2013;<lpage>717</lpage>. <pub-id pub-id-type="doi">10.1109/TBME.2018.2854899</pub-id> <pub-id pub-id-type="pmid">30004868</pub-id></citation></ref>
<ref id="B7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cilliers</surname> <given-names>P. J.</given-names></name> <name><surname>Van Der Kouwe</surname> <given-names>A. J. W.</given-names></name></person-group> (<year>1993</year>). &#x201C;<article-title>A VEP-based computer interface for C2-ouadriplegics</article-title>,&#x201D; in <source><italic>Proceedings of the 15th Annual International Conference of the IEEE</italic></source>, (<publisher-loc>San Diego, CA</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>1263</fpage>&#x2013;<lpage>1263</lpage>. <pub-id pub-id-type="doi">10.1109/IEMBS.1993.979126</pub-id></citation></ref>
<ref id="B8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gong</surname> <given-names>P.</given-names></name></person-group> (<year>2014</year>). <source><italic>Research on Feature Extraction of Motor Imagination EEG Signal.</italic></source> <publisher-loc>Chongqing</publisher-loc>: <publisher-name>Chongqing University</publisher-name>.</citation></ref>
<ref id="B9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Guan</surname> <given-names>W.</given-names></name> <name><surname>Qingzhi</surname> <given-names>Y.</given-names></name> <name><surname>Yang</surname> <given-names>G.</given-names></name><etal/></person-group> (<year>2015</year>). <article-title>Detection and location of transient power quality disturbances in distribution network based on db4 wavelet.</article-title> <source><italic>Power Syst. Prot. Control</italic></source> <volume>43</volume> <fpage>102</fpage>&#x2013;<lpage>106</lpage>.</citation></ref>
<ref id="B10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>He</surname> <given-names>K.</given-names></name> <name><surname>Zhang</surname> <given-names>X.</given-names></name> <name><surname>Ren</surname> <given-names>S.</given-names></name> <name><surname>Sun</surname> <given-names>J.</given-names></name></person-group> (<year>2016</year>). &#x201C;<article-title>Deep residual learning for image recognition</article-title>,&#x201D; in <source><italic>Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition</italic></source>, <publisher-loc>Las Vegas, NV</publisher-loc>, <fpage>770</fpage>&#x2013;<lpage>778</lpage>. <pub-id pub-id-type="doi">10.1109/CVPR.2016.90</pub-id></citation></ref>
<ref id="B11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Huang</surname> <given-names>J.</given-names></name> <name><surname>Chen</surname> <given-names>B.</given-names></name> <name><surname>Yao</surname> <given-names>B.</given-names></name> <name><surname>He</surname> <given-names>W.</given-names></name></person-group> (<year>2019</year>). <article-title>ECG arrhythmia classification using STFT-based spectrogram and convolutional neural network.</article-title> <source><italic>IEEE Access</italic></source> <volume>7</volume> <fpage>92871</fpage>&#x2013;<lpage>92880</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2019.2928017</pub-id></citation></ref>
<ref id="B12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Huang</surname> <given-names>J.</given-names></name> <name><surname>Chen</surname> <given-names>B.</given-names></name> <name><surname>Zeng</surname> <given-names>N.</given-names></name> <name><surname>Cao</surname> <given-names>X.-C.</given-names></name> <name><surname>Li</surname> <given-names>Y.</given-names></name></person-group> (<year>2020</year>). <article-title>Accurate classification of ECG arrhythmia using MOWPT enhanced fast compression deep learning networks.</article-title> <source><italic>J. Ambient Intell. Humaniz. Comput.</italic></source> <volume>5</volume> <fpage>1</fpage>&#x2013;<lpage>18</lpage>. <pub-id pub-id-type="doi">10.1007/s12652-020-02110-y</pub-id></citation></ref>
<ref id="B13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kemal</surname> <given-names>P.</given-names></name> <name><surname>Salih</surname> <given-names>G.</given-names></name></person-group> (<year>2007</year>). <article-title>Classification of epileptiform eeg using a hybrid system based on decision tree classifier and fast fourier transform.</article-title> <source><italic>Appl. Math. Comput.</italic></source> <volume>187</volume> <fpage>1017</fpage>&#x2013;<lpage>1026</lpage>. <pub-id pub-id-type="doi">10.1016/j.amc.2006.09.022</pub-id></citation></ref>
<ref id="B14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>Y.</given-names></name> <name><surname>Long</surname> <given-names>J.</given-names></name> <name><surname>Yu</surname> <given-names>T.</given-names></name> <name><surname>Yu</surname> <given-names>Z.</given-names></name> <name><surname>Wang</surname> <given-names>C.</given-names></name> <name><surname>Zhang</surname> <given-names>H.</given-names></name><etal/></person-group> (<year>2010</year>). <article-title>An EEG-based BCI system for 2-D cursor control by combining Mu/Beta rhythm and P300 potential.</article-title> <source><italic>IEEE Trans. Biomed. Eng.</italic></source> <volume>57</volume> <fpage>495</fpage>&#x2013;<lpage>2505</lpage>. <pub-id pub-id-type="doi">10.1109/TBME.2010.2055564</pub-id> <pub-id pub-id-type="pmid">20615806</pub-id></citation></ref>
<ref id="B15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>C.</given-names></name></person-group> (<year>2018</year>). <source><italic>Research and Design of Handwritten Digit Recognition Based on Convolutional Neural Network.</italic></source> <publisher-loc>Chengdu</publisher-loc>: <publisher-name>Chengdu University of Technology</publisher-name>.</citation></ref>
<ref id="B16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Manthalkar</surname> <given-names>R.</given-names></name> <name><surname>Biswas</surname> <given-names>P. K.</given-names></name> <name><surname>Chatterji</surname> <given-names>B. N.</given-names></name></person-group> (<year>2003</year>). <article-title>Rotation and scale invariant texture features using discrete wavelet packet transform.</article-title> <source><italic>Patt. Recognit. Lett.</italic></source> <volume>24</volume> <fpage>2455</fpage>&#x2013;<lpage>2462</lpage>. <pub-id pub-id-type="doi">10.1016/S0167-8655(03)00090-4</pub-id></citation></ref>
<ref id="B17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>McFarland</surname> <given-names>D. J.</given-names></name> <name><surname>Sarnacki</surname> <given-names>W. A.</given-names></name> <name><surname>Wolpaw</surname> <given-names>J. R.</given-names></name></person-group> (<year>2010</year>). <article-title>Electroencephalographic (EEG) control ofthree-dimensional movement.</article-title> <source><italic>J. Neural Eng.</italic></source> <volume>7</volume>:<fpage>036007</fpage>. <pub-id pub-id-type="doi">10.1088/1741-2560/7/3/036007</pub-id></citation></ref>
<ref id="B18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mervyn</surname> <given-names>V.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Shen</surname> <given-names>K.</given-names></name> <name><surname>Wilder-Smith</surname> <given-names>E. P.</given-names></name></person-group> (<year>2009</year>). <article-title>Can SVM be used for automatic EEG detection of drowsiness during car driving.</article-title> <source><italic>Saf. Sci.</italic></source> <volume>47</volume> <fpage>115</fpage>&#x2013;<lpage>124</lpage>. <pub-id pub-id-type="doi">10.1016/j.ssci.2008.01.007</pub-id></citation></ref>
<ref id="B19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nguyen</surname> <given-names>T.</given-names></name> <name><surname>Nguyen</surname> <given-names>T.</given-names></name> <name><surname>Truong</surname> <given-names>K.</given-names></name> <name><surname>Toi</surname> <given-names>V.</given-names></name></person-group> (<year>2013</year>). <source><italic>A Mean Threshold Algorithm for Human Eye Blinking Detection Using EEG.</italic></source> <publisher-loc>Berlin</publisher-loc>: <publisher-name>Springer</publisher-name>, <fpage>275</fpage>&#x2013;<lpage>279</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-642-32183-2_69</pub-id></citation></ref>
<ref id="B20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pfurtscheller</surname> <given-names>G.</given-names></name> <name><surname>Flotzinger</surname> <given-names>D.</given-names></name> <name><surname>Kalcher</surname> <given-names>J.</given-names></name></person-group> (<year>1993</year>). <article-title>Brain-computer interface-a new communication device for handicapped persons.</article-title> <source><italic>J. Microcomput. Appl.</italic></source> <volume>16</volume> <fpage>293</fpage>&#x2013;<lpage>299</lpage>. <pub-id pub-id-type="doi">10.1006/jmca.1993.1030</pub-id></citation></ref>
<ref id="B21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pfurtscheller</surname> <given-names>G.</given-names></name> <name><surname>Muller-Putz</surname> <given-names>G. R.</given-names></name> <name><surname>Schlogl</surname> <given-names>A.</given-names></name><etal/></person-group> (<year>2006</year>). <article-title>15 years of BCI research at Graz University of Technology: current projects.</article-title> <source><italic>IEEE Trans. Neural Syst. Rehabil. Eng.</italic></source> <volume>14</volume> <fpage>205</fpage>&#x2013;<lpage>210</lpage>. <pub-id pub-id-type="doi">10.1109/TNSRE.2006.875528</pub-id> <pub-id pub-id-type="pmid">16792295</pub-id></citation></ref>
<ref id="B22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pfurtscheller</surname> <given-names>G.</given-names></name> <name><surname>Neuper</surname> <given-names>C.</given-names></name> <name><surname>Flotzinger</surname> <given-names>D.</given-names></name> <name><surname>Pregenzer</surname> <given-names>M.</given-names></name></person-group> (<year>1997</year>). <article-title>EEG-based discrimination between imagination of right and left hand movement.</article-title> <source><italic>Electroencephalogr. Clin. Neurophysiol.</italic></source> <volume>103</volume> <fpage>642</fpage>&#x2013;<lpage>651</lpage>. <pub-id pub-id-type="doi">10.1016/S0013-4694(97)00080-1</pub-id></citation></ref>
<ref id="B23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pfurtscheller</surname> <given-names>G.</given-names></name> <name><surname>Neuper</surname> <given-names>C.</given-names></name> <name><surname>Guger</surname> <given-names>C.</given-names></name> <name><surname>Harkam</surname> <given-names>W.</given-names></name> <name><surname>Ramoser</surname> <given-names>H.</given-names></name> <name><surname>Schl&#x00F6;gl</surname> <given-names>A.</given-names></name><etal/></person-group> (<year>2000</year>). <article-title>Current trends in Graz brain&#x2013;computer interface (BCI) research.</article-title> <source><italic>IEEE Trans. Rehabil. Eng.</italic></source> <volume>8</volume> <fpage>216</fpage>&#x2013;<lpage>219</lpage>. <pub-id pub-id-type="doi">10.1109/86.847821</pub-id></citation></ref>
<ref id="B24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Qu</surname> <given-names>J.</given-names></name> <name><surname>Xian</surname> <given-names>S.</given-names></name> <name><surname>Xin</surname> <given-names>G.</given-names></name></person-group> (<year>2016</year>). <article-title>Remote sensing image target recognition based on CNN.</article-title> <source><italic>Foreign Electronic Meas. Technol.</italic></source> <volume>8</volume> <fpage>45</fpage>&#x2013;<lpage>50</lpage>.</citation></ref>
<ref id="B25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Satapathy</surname> <given-names>S.</given-names></name> <name><surname>Jagadev</surname> <given-names>A.</given-names></name> <name><surname>Dehuri</surname> <given-names>S.</given-names></name></person-group> (<year>2017</year>). <article-title>Weighted majority voting based ensemble of classifiers using different machine learning techniques for classification of eeg signal to detect epileptic seizure.</article-title> <source><italic>Informatica</italic></source> <volume>41</volume> <fpage>99</fpage>&#x2013;<lpage>110</lpage>.</citation></ref>
<ref id="B26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Walczak</surname> <given-names>B.</given-names></name> <name><surname>Massart</surname> <given-names>D. L.</given-names></name></person-group> (<year>1997</year>). <article-title>Noise suppression and signal compression using the wavelet packet transform.</article-title> <source><italic>Chemometr. Intell. Lab. Syst.</italic></source> <volume>36</volume> <fpage>81</fpage>&#x2013;<lpage>94</lpage>. <pub-id pub-id-type="doi">10.1016/S0169-7439(96)00077-9</pub-id></citation></ref>
<ref id="B27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>T.</given-names></name> <name><surname>Guan</surname> <given-names>S.</given-names></name> <name><surname>Man</surname> <given-names>K.</given-names></name> <name><surname>Ting</surname> <given-names>T.</given-names></name></person-group> (<year>2014</year>). <article-title>EEG eye state identification using incremental attribute learning with time-series classification.</article-title> <source><italic>Math. Probl. Eng.</italic></source> <volume>2014</volume>:<fpage>365101</fpage>. <pub-id pub-id-type="doi">10.1155/2014/365101</pub-id></citation></ref>
<ref id="B28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>Y.</given-names></name> <name><surname>Hong</surname> <given-names>B.</given-names></name> <name><surname>Gao</surname> <given-names>X.</given-names></name> <name><surname>Gao</surname> <given-names>S.</given-names></name></person-group> (<year>2007</year>). <article-title>Implementation of a Brain-computer interface based on three states of motor imagery.</article-title> <source><italic>Annu. Int. Conf. IEEE Eng. Med. Biol. Soc.</italic></source> <volume>2007</volume> <fpage>5059</fpage>&#x2013;<lpage>5062</lpage>. <pub-id pub-id-type="doi">10.1109/IEMBS.2007.4353477</pub-id> <pub-id pub-id-type="pmid">18003143</pub-id></citation></ref>
<ref id="B29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Xu</surname> <given-names>B.</given-names></name> <name><surname>Aiguo</surname> <given-names>S.</given-names></name> <name><surname>Renhuan</surname> <given-names>Y.</given-names></name></person-group> (<year>2011</year>). <article-title>Online brain-computer interface experiment based on motor imagination EEG.</article-title> <source><italic>J. Huazhong Univ. Sci. Technol.</italic></source> <volume>39</volume> <fpage>60</fpage>&#x2013;<lpage>64</lpage>.</citation></ref>
<ref id="B30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yin</surname> <given-names>W.</given-names></name> <name><surname>Yang</surname> <given-names>X.</given-names></name> <name><surname>Zhang</surname> <given-names>L.</given-names></name> <name><surname>Oki</surname> <given-names>E.</given-names></name></person-group> (<year>2016</year>). <article-title>ECG monitoring system integrated with IR-UWB radar based on CNN.</article-title> <source><italic>IEEE Access</italic></source> <volume>4</volume> <fpage>6344</fpage>&#x2013;<lpage>6351</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2016.2608777</pub-id></citation></ref>
<ref id="B31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yu</surname> <given-names>L.</given-names></name> <name><surname>Chen</surname> <given-names>H.</given-names></name> <name><surname>Dou</surname> <given-names>Q.</given-names></name> <name><surname>Qin</surname> <given-names>J.</given-names></name> <name><surname>Heng</surname> <given-names>P.-A.</given-names></name></person-group> (<year>2016</year>). <article-title>Automated melanoma recognition in dermoscopy images via very deep residual networks.</article-title> <source><italic>IEEE Trans. Med. Imaging</italic></source> <volume>36</volume> <fpage>994</fpage>&#x2013;<lpage>1004</lpage>. <pub-id pub-id-type="doi">10.1109/TMI.2016.2642839</pub-id> <pub-id pub-id-type="pmid">28026754</pub-id></citation></ref>
</ref-list>
</back>
</article>