<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Neurosci.</journal-id>
<journal-title>Frontiers in Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Neurosci.</abbrev-journal-title>
<issn pub-type="epub">1662-453X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnins.2025.1654827</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Neuroscience</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>EEG synchronization signatures for decoding attentional states during continuous force control</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>Zheng</surname>
<given-names>Yilei</given-names>
</name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3010830/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Wang</surname>
<given-names>Qiaoxiu</given-names>
</name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3196780/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Tong</surname>
<given-names>Qianqian</given-names>
</name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/1852578/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Tian</surname>
<given-names>Bohao</given-names>
</name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Su</surname>
<given-names>Peng</given-names>
</name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Xu</surname>
<given-names>Yonghong</given-names>
</name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3052066/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Wang</surname>
<given-names>Dangxiao</given-names>
</name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/431114/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>College of Mechanical and Electrical Engineering, Beijing Information Science and Technology University</institution>, <addr-line>Beijing</addr-line>, <country>China</country></aff>
<aff id="aff2"><sup>2</sup><institution>Qiyuan Laboratory</institution>, <addr-line>Beijing</addr-line>, <country>China</country></aff>
<aff id="aff3"><sup>3</sup><institution>Peng Cheng Laboratory</institution>, <addr-line>Shenzhen</addr-line>, <country>China</country></aff>
<aff id="aff4"><sup>4</sup><institution>State Key Laboratory of Virtual Reality Technology and Systems, Beihang University</institution>, <addr-line>Beijing</addr-line>, <country>China</country></aff>
<author-notes>
<fn fn-type="edited-by" id="fn0001">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/12134/overview">Juan Lupi&#x00E1;&#x00F1;ez</ext-link>, University of Granada, Spain</p>
</fn>
<fn fn-type="edited-by" id="fn0002">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2781011/overview">Klara Hemmerich</ext-link>, University of Trento, Italy</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3151940/overview">Jorge Hidalgo</ext-link>, Universidad de C&#x00F3;rdoba, Spain</p>
</fn>
<corresp id="c001">&#x002A;Correspondence: Qiaoxiu Wang, <email>wangqiaoxiu@qiyuanlab.com</email></corresp>
</author-notes>
<pub-date pub-type="epub">
<day>08</day>
<month>10</month>
<year>2025</year>
</pub-date>
<pub-date pub-type="collection">
<year>2025</year>
</pub-date>
<volume>19</volume>
<elocation-id>1654827</elocation-id>
<history>
<date date-type="received">
<day>30</day>
<month>06</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>23</day>
<month>09</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2025 Zheng, Wang, Tong, Tian, Su, Xu and Wang.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Zheng, Wang, Tong, Tian, Su, Xu and Wang</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<sec id="sec1001">
<title>Introduction</title>
<p>Mind wandering, the shift of attention from an ongoing task to task-unrelated thoughts, is a pervasive cognitive phenomenon often accompanied by detrimental consequences for task performance. While extensively studied in visual and auditory paradigms, attentional fluctuations during visuo-haptic tasks, such as force control, remain underexplored despite their high relevance to real-world skilled activities such as surgical operations or robotic-assisted manipulation. There exists a critical deficiency in exploring signatures of mind wandering from the perspective of neural synchronization.</p>
</sec>
<sec id="sec2001">
<title>Methods</title>
<p>This study investigated EEG-based synchronization features to decode attentional states during a novel continuous force control task using the thought-probe method. Nine healthy male participants tracked a dynamically varying target force while scalp EEG and high-frequency force data were recorded synchronously. EEG epochs preceding self-reported attentional probes were labeled as on-task or mind-wandering states. Spectral power and three synchronization features &#x2013; cross-frequency coupling, functional connectivity, and neural-behavioral synchronization &#x2013; were extracted and compared between on-task and mind-wandering states.</p>
</sec>
<sec id="sec3001">
<title>Results and discussion</title>
<p>Results revealed that the mind-wandering state was characterized by increased alpha power (8&#x2013;10 Hz) over frontal-posterior regions and reduced occurrence of high alpha-theta harmonic ratios. It also exhibited increased functional connectivity within sensorimotor networks and decreased mutual information between frontal EEG activity and force errors. Support vector machine classifiers for the binary attentional-state classification, utilizing combined spectral power and synchronization features, achieved 75.53% within-participant and 71.57% cross-participant accuracy, outperforming single-feature models. These findings highlight EEG synchronization signatures of mind wandering and demonstrate their feasibility for decoding attentional states during the force control task. This work may provide a foundation for future exploration of haptic-based neurofeedback systems, which could potentially complement existing visual and auditory modalities in applications such as neurocognitive rehabilitation or skilled motor training.</p>
</sec>
</abstract>
<kwd-group>
<kwd>mind wandering</kwd>
<kwd>attentional state</kwd>
<kwd>electroencephalogram (EEG)</kwd>
<kwd>neural synchronization</kwd>
<kwd>machine learning</kwd>
<kwd>force control</kwd>
</kwd-group>
<counts>
<fig-count count="9"/>
<table-count count="3"/>
<equation-count count="3"/>
<ref-count count="71"/>
<page-count count="18"/>
<word-count count="12335"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Brain Imaging Methods</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="sec1">
<label>1</label>
<title>Introduction</title>
<p>Sustained attention, the cognitive ability to maintain focus on a given task over extended periods, is essential for our everyday lives. However, this ability is inherently limited. Attention often shifts from the ongoing task to spontaneous, task-unrelated thoughts&#x2014;a ubiquitous phenomenon termed mind wandering (MW) in academic research (<xref ref-type="bibr" rid="ref57">Smallwood and Schooler, 2015</xref>; <xref ref-type="bibr" rid="ref6">Christoff et al., 2016</xref>). While MW may benefit creativity and problem-solving under certain conditions, its negative outcomes, such as impaired task performance, increased risk of accidents, and affective dysfunction, have attracted significant research attention (<xref ref-type="bibr" rid="ref28">Killingsworth and Gilbert, 2010</xref>; <xref ref-type="bibr" rid="ref52">Schooler et al., 2011</xref>). Consequently, developing reliable and objective methods for detecting MW has become a major research focus in recent decades (<xref ref-type="bibr" rid="ref15">Fortenbaugh et al., 2018</xref>; <xref ref-type="bibr" rid="ref27">Kam et al., 2022</xref>; <xref ref-type="bibr" rid="ref60">Tang and Li, 2024</xref>). Investigating the neural mechanisms underlying MW and establishing objective detection methods hold promise for the diagnosis and intervention of attention-related neurological disorders, such as mild cognitive impairment (MCI) and attention deficit hyperactivity disorder (ADHD) (<xref ref-type="bibr" rid="ref70">Zhao and Yuan, 2025</xref>; <xref ref-type="bibr" rid="ref64">Wiebe et al., 2024</xref>).</p>
<p>MW has primarily been studied through vigilance tasks within visual or auditory modalities, such as the continuous performance task (CPT), where participants respond to infrequent target stimuli (<xref ref-type="bibr" rid="ref14">Esterman et al., 2013</xref>), and the sustained attention to response task (SART), which typically requires responding to frequent non-targets while withholding responses to rare targets (<xref ref-type="bibr" rid="ref13">Eskandari Nasab et al., 2024</xref>). However, relatively little is known about MW during haptic or visuo-haptic tasks that involve continuous motor regulation, such as force control. Force control refers to the precise and continuous adjustment of muscle output or tool-applied forces to achieve task goals (<xref ref-type="bibr" rid="ref40">Nakahara et al., 2002</xref>). This ability is critical in many real-world scenarios, such as surgical tool manipulation, robotic-assisted rehabilitation exercises, and sports requiring fine motor adjustments. Studying MW in these contexts is important because lapses in attention can directly impair task performance, leading to increased errors or reduced efficiency.</p>
<p>Although the ultimate goal of MW detection research is to develop objective detection methods for MW, thought probing remains essential due to its inherently subjective nature. A widely used approach involves inserting probe questions into the ongoing task randomly or at the end of each block (<xref ref-type="bibr" rid="ref4">Cheyne et al., 2009</xref>; <xref ref-type="bibr" rid="ref58">Stawarczyk et al., 2011</xref>; <xref ref-type="bibr" rid="ref53">Seli et al., 2018</xref>). Upon encountering a probe, participants are asked to report the content of their thoughts or rate their attentional focus. By comparing measures within the few seconds preceding mind-wandering reports with those preceding on-task reports, researchers have linked behavioral indicators (e.g., errors and response time variability) (<xref ref-type="bibr" rid="ref14">Esterman et al., 2013</xref>; <xref ref-type="bibr" rid="ref71">Zheng et al., 2019</xref>; <xref ref-type="bibr" rid="ref44">Peng et al., 2021</xref>) and physiological signals (e.g., eye movements, pupillometry, and heart rate) (<xref ref-type="bibr" rid="ref56">Smallwood et al., 2007</xref>, <xref ref-type="bibr" rid="ref55">2011</xref>; <xref ref-type="bibr" rid="ref62">Wainstein et al., 2017</xref>) to self-reported MW.</p>
<p>Regarding neural correlates of attentional fluctuations, the electroencephalogram (EEG) has been widely used due to its high temporal resolution and applicability. EEG power-based metrics and event-related potential (ERP) components have been extensively studied as important parameters for characterizing MW (<xref ref-type="bibr" rid="ref27">Kam et al., 2022</xref>). Numerous studies have reported reduced amplitudes of ERP components (P1 and P3) prior to performance errors or MW reports (<xref ref-type="bibr" rid="ref2">Braboszcz and Delorme, 2010</xref>; <xref ref-type="bibr" rid="ref26">Kam et al., 2011</xref>; <xref ref-type="bibr" rid="ref33">Liu et al., 2021</xref>), supporting the &#x201C;perceptual decoupling&#x201D; hypothesis (<xref ref-type="bibr" rid="ref52">Schooler et al., 2011</xref>; <xref ref-type="bibr" rid="ref54">Smallwood, 2013</xref>). This theory posits that attention disengages from external sensory input during MW. Additionally, spectral features including delta, theta, alpha, and beta power have been extensively examined in relation to MW. For example, increased power in the alpha band has been linked to both vigilance decrement and the occurrence of MW during SART (<xref ref-type="bibr" rid="ref8">Compton et al., 2019</xref>; <xref ref-type="bibr" rid="ref22">Jin et al., 2019</xref>). Power-based indices derived from spectral bands (e.g., beta-to-alpha ratio and inverse alpha power) have also shown significant correlations with behavioral markers of attentional lapses (<xref ref-type="bibr" rid="ref7">Coelli et al., 2018</xref>), such as variations in mean reaction time.</p>
<p>While these spectral and ERP features provide valuable markers of MW, they largely capture localized neural activity. To further understand how distributed neural systems coordinate during attentional fluctuations, a growing number of studies have explored neural synchronization-based measures, including cross-frequency coupling (e.g., alpha-theta phase synchrony and harmonicity) and functional connectivity between brain regions. Functional connectivity (FC) measures the synchrony between signals recorded from different electrodes or regions (e.g., via phase-locking value, coherence), reflecting functional integration within networks (<xref ref-type="bibr" rid="ref1">Aydore et al., 2013</xref>). Numerous studies using functional Magnetic Resonance Imaging (fMRI) have shown that the Default Mode Network (DMN) exhibits increased activation and altered connectivity patterns with other networks (e.g., the dorsal attention network) during the MW state (<xref ref-type="bibr" rid="ref21">Jang et al., 2011</xref>; <xref ref-type="bibr" rid="ref38">Mittner et al., 2014</xref>; <xref ref-type="bibr" rid="ref6">Christoff et al., 2016</xref>; <xref ref-type="bibr" rid="ref29">Kucyi et al., 2016</xref>). A pioneering study combining DMN activation with pupil diameter achieved promising MW classification accuracy (~80% within-participant, ~65% across-participant) (<xref ref-type="bibr" rid="ref17">Groot et al., 2021</xref>). Cross-frequency coupling (CFC), which examines synchronization between neural oscillations of different frequencies, is believed to underlie complex information processing and communication. For example, increased alpha-theta phase synchrony has been associated with the occurrence of MW during breath-focused meditation (<xref ref-type="bibr" rid="ref48">Rodriguez-Larios and Alaerts, 2019</xref>, <xref ref-type="bibr" rid="ref49">2021</xref>; <xref ref-type="bibr" rid="ref50">Rodriguez-Larios et al., 2020</xref>). 
Despite these advances, the relationship between EEG-derived synchronization features (both FC and CFC) and attentional fluctuations remains less well-established and understood. Few studies have explicitly examined the efficacy of EEG synchronization features in classifying attentional states, and most have focused on visual paradigms (<xref ref-type="bibr" rid="ref37">Melinscak et al., 2016</xref>; <xref ref-type="bibr" rid="ref8">Compton et al., 2019</xref>; <xref ref-type="bibr" rid="ref17">Groot et al., 2021</xref>). The potential of these synchronization features for MW detection is largely unexplored.</p>
<p>Given these limitations, an important next step is to evaluate whether such synchronization features can enhance MW detection when combined with machine learning approaches. Recently, machine learning techniques have increasingly been applied to classify attentional states. The majority of these studies have extracted features from EEG recordings during visual tasks (e.g., SART, visual search tasks) and used spectral power or ERP components as classifier inputs, typically reporting classification accuracies of 60&#x2013;70% (<xref ref-type="bibr" rid="ref22">Jin et al., 2019</xref>, <xref ref-type="bibr" rid="ref23">2020</xref>; <xref ref-type="bibr" rid="ref12">Dong et al., 2021</xref>). Beyond basic features, researchers have also explored complexity-based metrics, such as sample entropy and permutation entropy. These metrics, which capture the irregularity or predictability of EEG signals, have yielded promising classification performance (e.g., AUC up to 0.71) in SART paradigms (<xref ref-type="bibr" rid="ref3">Chen et al., 2022</xref>). A recent study comparing complexity features with traditional band power reported comparable performance (AUC&#x202F;=&#x202F;0.64) in video learning tasks, with slight improvements from combining features (AUC&#x202F;=&#x202F;0.66) (<xref ref-type="bibr" rid="ref60">Tang and Li, 2024</xref>). Notably, features capturing neural synchronization, such as functional connectivity and alpha-theta phase synchrony, represent a promising yet underexplored avenue for improving MW detection accuracy.</p>
<p>Collectively, despite growing interest and recent advances in understanding attentional fluctuations and detecting MW, several critical issues need to be considered, particularly regarding task modalities and feature types. First, the majority of MW research has relied on visual or auditory tasks. In contrast, MW detection in haptic or visuo-haptic tasks&#x2014;such as force control&#x2014;remains significantly understudied. The neurocognitive basis of haptic perception involves complex feedback loops from hand tactile receptors to the primary somatosensory cortex (S1) and broader attentional networks (<xref ref-type="bibr" rid="ref30">Lederman and Klatzky, 2009</xref>; <xref ref-type="bibr" rid="ref18">Grunwald, 2008</xref>). The hand&#x2019;s acute perception and force control abilities make it particularly well-suited for investigating attentional fluctuations. Although preliminary work by <xref ref-type="bibr" rid="ref44">Peng et al. (2021)</xref> using a discrete force control task identified behavioral markers (e.g., reaction time variability) and basic EEG features (e.g., increased frontal-central alpha power) potentially related to MW, a systematic investigation into the neural signatures of attentional states&#x2014;particularly regarding synchronization dynamics during the force control process&#x2014;remains lacking. Second, the very few EEG studies examining attentional states during force control tasks have confined analysis to fundamental, well-established metrics such as spectral power density and ERPs (<xref ref-type="bibr" rid="ref68">Zhang et al., 2023</xref>; <xref ref-type="bibr" rid="ref9">Delisle-Rodriguez et al., 2023</xref>). There exists a critical deficiency in examining features from the perspective of neural synchronization, including FC, CFC, as well as the synchronization between neural activities and behavioral data, within the haptic modality. 
These synchronization metrics may reveal how distributed brain networks coordinate and how brain activity interacts with motor output during attentional lapses in force control. They may serve as novel and more sensitive biomarkers compared to isolated power or ERP components. Finally, as a consequence of these gaps, the utility and effectiveness of EEG synchronization features for decoding attentional states during force control tasks remain largely unexplored. Whether robust synchronization signatures can be extracted from complex sensorimotor tasks, and whether these features can reliably distinguish MW from on-task states, remain unclear.</p>
<p>Therefore, this study aims to address these issues by investigating EEG synchronization signatures within a force control paradigm. We propose a novel continuous visuo-haptic force control task in which participants precisely modulate their force to track a dynamically changing target force based on visual cues. During the task, participants reported their attentional states when thought probes appeared. Simultaneously, EEG and high-frequency force data were recorded. The objectives of this study are twofold:</p>
<list list-type="simple">
<list-item>
<p>(1) To examine EEG synchronization metrics&#x2014;specifically cross-frequency coupling, functional connectivity, and neural-behavioral synchronization&#x2014;sensitive to attentional fluctuations (on-task versus MW states) during force control.</p>
</list-item>
<list-item>
<p>(2) To evaluate the feasibility and classification performance of these synchronization features, both individually and in combination with other metrics, for classifying on-task and MW states using machine learning.</p>
</list-item>
</list>
<p>We hypothesize that synchronization metrics, capturing the interactive dynamics within the brain and between the brain and behavior, will provide unique and complementary information for decoding attentional states during force control. By identifying EEG synchronization markers of MW in this underexplored haptic context and evaluating their feasibility for decoding attentional states, this study aims to deepen our understanding of the neural basis of attention and lay the groundwork for future research into developing haptic-based neurofeedback attention training systems. Such systems could potentially offer a valuable complement to existing visual and auditory approaches for neurocognitive rehabilitation or skilled activities training.</p>
</sec>
<sec sec-type="methods" id="sec2">
<label>2</label>
<title>Methods</title>
<sec id="sec3">
<label>2.1</label>
<title>Participants</title>
<p>Fourteen healthy male adults (mean age&#x202F;=&#x202F;27.4&#x202F;&#x00B1;&#x202F;3.5&#x202F;years) participated in the experiment. Informed written consent was obtained from all participants. One participant was excluded for not completing the entire experiment, resulting in a final sample of 13 participants. All participants reported normal or corrected-to-normal vision and were right-handed. The study was approved by the Biological and Medical Ethics Committee of Beihang University and was conducted in accordance with the World Medical Association Declaration of Helsinki.</p>
</sec>
<sec id="sec4">
<label>2.2</label>
<title>Task and experimental procedure</title>
<p>Considering that humans are more susceptible to attentional lapses during prolonged continuous tasks than discrete ones (<xref ref-type="bibr" rid="ref46">Rahman et al., 2021</xref>; <xref ref-type="bibr" rid="ref47">Reteig et al., 2019</xref>), we designed a novel continuous force control task to induce mind wandering, instead of using the discrete paradigms from prior studies (<xref ref-type="bibr" rid="ref44">Peng et al., 2021</xref>; <xref ref-type="bibr" rid="ref68">Zhang et al., 2023</xref>). Herein, participants held a pen-shaped handle to exert force while tracking a periodically varying target force. As shown in <xref ref-type="fig" rid="fig1">Figure 1a</xref>, participants were seated in front of a computer screen and held the handle with their dominant hand. They maintained a naturalistic pen-holding posture, mimicking handwriting. The handle was an end effector of a haptic device (Touch, 3D Systems Inc., United States). The haptic device allowed six degrees of freedom in movement and three degrees of freedom in force feedback within a 26.5&#x202F;&#x00D7;&#x202F;24.1&#x202F;&#x00D7;&#x202F;8.9&#x202F;cm workspace. A virtual 3D scene was constructed, displaying a gray circular object, an annular object, and a pen-shaped object. The circular and annular objects remained fixed, while the pen-shaped virtual object was attached to the real-world handle. As participants moved the handle in the real space, the virtual pen synchronously performed the same movement on the screen.</p>
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption>
<p>Task design and experimental procedure. <bold>(a)</bold> Continuous force control task. Participants were instructed to adjust the width of a clockwise moving trajectory by modulating their output force, aiming to match the gray target zone as closely as possible. <bold>(b)</bold> Experimental procedure. Force data and EEG data were recorded during the entire experiment. With an interval of 40&#x2013;50&#x202F;s, a thought probe would appear on the screen, requiring participants to report the degree to which their thoughts were wandering from the task. The 3-s data preceding each thought probe was extracted as a single trial.</p>
</caption>
<graphic xlink:href="fnins-19-1654827-g001.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Diagram illustrating an experimental setup for a force control task with EEG monitoring. Panel (a) shows a participant interacting with a haptic device while visualizing trajectory tasks on a screen. The visual panel includes trajectory direction and feedback, with force exertion indicated. Panel (b) outlines the trial structure over time, including force and EEG data collection, and thought probes.</alt-text>
</graphic>
</fig>
<p>During the task, a black dot moved clockwise around the gray annular region at a constant speed of 20&#x00B0;/s. The diameter of the black dot (<italic>D</italic><sub>o</sub>, in millimeters, mm) scaled proportionally to the contact force (<italic>F</italic><sub>o</sub>, in newtons, N) exerted by the user&#x2019;s virtual pen on the circular palette, as shown in <xref ref-type="disp-formula" rid="EQ1">Equation 1</xref>:</p>
<disp-formula id="EQ1">
<label>(1)</label>
<mml:math id="M1">
<mml:msub>
<mml:mi>D</mml:mi>
<mml:mi mathvariant="normal">o</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mi>k</mml:mi>
<mml:mo>&#x22C5;</mml:mo>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mi mathvariant="normal">o</mml:mi>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:mn>0.5</mml:mn>
</mml:math>
</disp-formula>
<p>where <italic>k</italic> denotes a scaling constant (<italic>k</italic>&#x202F;=&#x202F;3&#x202F;mm/N). This value was empirically determined to ensure that the dot&#x2019;s size varied within a perceptually noticeable but not overly intrusive range (approximately 2.0&#x2013;10.0&#x202F;mm) across the allowable force range of 0.5&#x2013;3.17&#x202F;N. A linear mapping was adopted due to its intuitiveness, allowing participants to quickly learn the relationship between applied force and visual feedback. This helped reduce the learning burden and enabled participants to focus on force output regulation. The width of the gray annular region varied nonlinearly over each 60-degree cycle. Participants were required to adjust their exerted force based on the dot&#x2019;s position to align its trajectory with the target zone. The circular palette was established using sphere tree models with Solidworks (Dassault Systems Inc., United States) and 3D Studio Max (Autodesk Inc., United States). Real-time force feedback was implemented using a validated haptic rendering algorithm (<xref ref-type="bibr" rid="ref63">Wang et al., 2013</xref>). Exerted forces were recorded at a 1,000&#x202F;Hz sampling rate using the haptic device and custom scripts developed with Microsoft Foundation Classes (MFC).</p>
<p>The experimental procedure is illustrated in <xref ref-type="fig" rid="fig1">Figure 1b</xref>. Each participant completed three sessions of the force control task following a practice session. Each session lasted approximately 10 min, with a short break (1&#x2013;2&#x202F;min) to eliminate muscle fatigue. Participants were instructed to focus on the moving dot and modulate their exerted force to match the target zone&#x2019;s width. At random intervals (40&#x2013;50&#x202F;s), a probe question appeared on the screen asking &#x2018;What are you thinking about? Task or Something else?&#x2019; Participants rated their attentional states on a 0&#x2013;100 scale, where 0 indicated being completely focused on the task and 100 indicated complete distraction (<xref ref-type="bibr" rid="ref29">Kucyi et al., 2016</xref>). EEG and force data from the 3-s time window preceding each thought probe were extracted as individual trials for subsequent analyses.</p>
</sec>
<sec id="sec5">
<label>2.3</label>
<title>Behavioral data analysis</title>
<p>During the continuous force control task, the exerted force data were recorded at a sampling rate of 1,000&#x202F;Hz. Because the target force (<italic>F</italic><sub>target</sub>), corresponding to the width of the target zone, varied nonlinearly over a 3-s cycle (i.e., 60 degrees), the target force pattern in the 3&#x202F;s preceding each probe was consistent across trials. As shown in <xref ref-type="fig" rid="fig2">Figure 2a</xref>, we calculated the force error within the 3-s period to assess the performance of force control for each trial. The force error (<italic>BE</italic><sub>NL</sub>) represented the relative difference between the output force (<italic>F</italic><sub>output</sub>) and the target force (<italic>F</italic><sub>target</sub>). It was computed at each sampling point and analyzed using sliding windows (1-s length, 90% overlap) to quantify the variation of force errors within each trial. <italic>BE</italic><sub>NL</sub> was computed as <xref ref-type="disp-formula" rid="EQ2">Equation 2</xref>:</p>
<disp-formula id="EQ2">
<label>(2)</label>
<mml:math id="M2">
<mml:mi>B</mml:mi>
<mml:msub>
<mml:mi>E</mml:mi>
<mml:mi>NL</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:msqrt>
<mml:mfrac>
<mml:mrow>
<mml:mo>&#x2211;</mml:mo>
<mml:msup>
<mml:mrow>
<mml:mo stretchy="true">(</mml:mo>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mtext>target</mml:mtext>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mtext>output</mml:mtext>
</mml:msub>
<mml:mo stretchy="true">)</mml:mo>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
<mml:mrow>
<mml:mo>&#x2211;</mml:mo>
<mml:msup>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mtext>target</mml:mtext>
</mml:msub>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:mfrac>
</mml:msqrt>
</mml:math>
</disp-formula>
<p>where the value of <italic>BE</italic><sub>NL</sub> ranges from 0 to 1, with smaller values indicating better task performance. We also calculated the sum of <italic>BE</italic><sub>NL</sub> within each trial, termed <italic>BE</italic><sub>NL-trial</sub>, to assess the overall behavioral performance. <xref ref-type="fig" rid="fig2">Figure 2b</xref> shows examples of <italic>BE</italic><sub>NL-trial</sub> values for two trials, where a smaller value indicates better force control performance.</p>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption>
<p>Behavioral performance during the force control task. <bold>(a)</bold> Example data of the target force, output force, and the force error <italic>BE</italic><sub>NL</sub> within a trial. <bold>(b)</bold> Examples of the overall force error <italic>BE</italic><sub>NL-trial</sub> during two trials. The left panel with a lower value of <italic>BE</italic><sub>NL-trial</sub> (0.06) shows relatively good performance and the right panel with a higher value of <italic>BE</italic><sub>NL-trial</sub> (0.29) shows relatively poor performance.</p>
</caption>
<graphic xlink:href="fnins-19-1654827-g002.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Graph (a) shows two overlapping line graphs, 'F_target' in blue and 'F_output' in orange, plotted against time. A third line in purple represents BE_NL. Graph (b) has two panels showing the same data, with the left panel indicating a BE_NL-trial of 0.06 and the right panel 0.29. Both panels have blue and orange lines representing different force measurements over time.</alt-text>
</graphic>
</fig>
<p>Additionally, based on the participants&#x2019; self-reported ratings, trials were sorted by quartiles for each subject. Specifically, trials were divided into four groups based on the quartiles of the rating scores. Trials with ratings in the lowest quartile (&#x2264;25%) were classified as task-focused (onT), and those in the highest quartile (&#x2265;75%) as mind-wandering (MW).</p>
</sec>
<sec id="sec6">
<label>2.4</label>
<title>EEG data acquisition and analysis</title>
<sec id="sec7">
<label>2.4.1</label>
<title>EEG recording and pre-processing</title>
<p>EEG data were recorded from 64 Ag/AgCl electrodes positioned according to the international 10&#x2013;20 system, using an NSW364 wireless amplifier (Neuracle Technology Co., Ltd., China) at a sampling rate of 1,000&#x202F;Hz. The impedance of the electrodes was maintained below 5 k&#x03A9; using NaCl-based conductive gel (<xref ref-type="bibr" rid="ref32">Liu et al., 2019</xref>). The reference electrode was placed at the CPz. EEG recordings were synchronized to the force control task, and event times (thought probe onsets) were automatically documented with markers in the continuous EEG data files.</p>
<p>Raw EEG data were preprocessed offline using EEGLAB v13.6.5b (<xref ref-type="bibr" rid="ref10">Delorme and Makeig, 2004</xref>), an open-source toolbox running in MATLAB R2021a (MathWorks Inc., United States). The EEG data were first bandpass filtered between 2 and 45 Hz using a zero-phase FIR filter with a Hamming window (function <italic>pop_eegfiltnew</italic>). The filtered data were re-referenced using the REST toolbox (<xref ref-type="bibr" rid="ref11">Dong et al., 2017</xref>). For each participant, data segments within each trial were extracted and concatenated for further analysis. Subsequently, artifact rejection was performed on the concatenated EEG data in three steps. First, bad channels were identified and removed using the <italic>pop_rejchan</italic> function in EEGLAB based on probability, resulting in the removal of an average of 1.0 (SD&#x202F;=&#x202F;1.15) channels per participant. Second, the remaining data were decomposed using logistic infomax Independent Component Analysis (ICA; function <italic>pop_runica</italic>), and artifact components were identified and rejected using the MARA plugin (<xref ref-type="bibr" rid="ref66">Winkler et al., 2011</xref>, <xref ref-type="bibr" rid="ref65">2014</xref>). Third, the signals were back-projected to the sensor level, and any rejected bad channels were interpolated using the <italic>pop_interp</italic> function. Finally, the cleaned EEG data for each participant were categorized into two separate sets: on-task (onT) or mind-wandering (MW).</p>
<p>It should be noted that after the ICA-based artifact removal, EEG data from nine participants were included in the subsequent analysis; data from the other four participants were excluded due to extensive artifacts exceeding &#x00B1;150&#x202F;&#x03BC;V (<xref ref-type="bibr" rid="ref69">Zhang et al., 2019</xref>; <xref ref-type="bibr" rid="ref44">Peng et al., 2021</xref>).</p>
</sec>
<sec id="sec8">
<label>2.4.2</label>
<title>Feature extraction</title>
<sec id="sec9">
<label>2.4.2.1</label>
<title>Spectral power</title>
<p>At each electrode, a short-time Fourier transform was performed on the preprocessed EEG data to estimate the power spectrum for each trial. The sliding window length was set to 1 s with 90% overlap between successive segments. Subsequently, EEG power spectra were extracted into five frequency bands: delta (1&#x2013;4&#x202F;Hz), theta (4&#x2013;8&#x202F;Hz), alpha (8&#x2013;13&#x202F;Hz), beta (13&#x2013;30&#x202F;Hz) and gamma (30&#x2013;45&#x202F;Hz), and log-transformed. For each condition, the spectral powers of all trials were averaged to obtain the mean band power for each frequency band at each electrode. The group-level average power for each frequency band was calculated by averaging the band power across all participants.</p>
</sec>
<sec id="sec10">
<label>2.4.2.2</label>
<title>Cross-frequency coupling: alpha-theta ratios</title>
<p>Given reports of increased alpha-theta harmonicity and phase synchrony during MW, this study assessed alpha-theta cross-frequency coupling during the force control task. Following previous studies (<xref ref-type="bibr" rid="ref49">Rodriguez-Larios and Alaerts, 2021</xref>), we used the findpeaks approach to compute the cross-frequency ratios between the theta and alpha bands. Specifically, after applying short-time Fourier transformations (sliding window&#x202F;=&#x202F;100&#x202F;ms) to the preprocessed EEG data to compute the time-varying spectrum between 4 and 13 Hz, we applied the <italic>findpeaks</italic> function to detect transient peak frequencies in the theta (4&#x2013;8&#x202F;Hz) and alpha (8&#x2013;13&#x202F;Hz) bands separately. When more than one peak was detected, the frequency with the highest amplitude was selected as the peak frequency. The algorithm detected at least one peak in 99.52% (SD&#x202F;=&#x202F;0.20%) of alpha-band time points and 94.11% (SD&#x202F;=&#x202F;1.19%) of theta-band time points. The identified transient peak frequencies in the alpha and theta bands were used to compute their numerical ratio per time point. To analyze the distribution of these ratios, we binned them in steps of 0.1 across a range of 1.1&#x2013;3.3 and calculated the proportion of time points falling into each bin. Proportions of cross-frequency ratios were computed per epoch and averaged within the same condition (onT or MW) for each participant and electrode. This yielded a normalized distribution across ratio bins for each condition, participant, and electrode, which we refer to as the probability density of alpha-theta ratios. In this framework, a higher probability density at a specific ratio indicates that the alpha-theta system was more likely to align around that harmonic relationship during the task. 
Neurophysiologically, this suggests that certain stable harmonic states are more prevalent, which may reflect a mechanism that facilitates efficient cross-frequency synchronization necessary for sustained attention (<xref ref-type="bibr" rid="ref48">Rodriguez-Larios and Alaerts, 2019</xref>).</p>
</sec>
<sec id="sec11">
<label>2.4.2.3</label>
<title>Functional connectivity: within-band coupling between electrode pairs</title>
<p>Phase Locking Value (PLV) quantifies phase synchronization between electrode pairs by measuring the absolute value of the mean phase difference between two signals as a complex unit-length vector (<xref ref-type="bibr" rid="ref51">Rosenberg et al., 1989</xref>; <xref ref-type="bibr" rid="ref1">Aydore et al., 2013</xref>). PLV is a measure of pairwise functional connectivity commonly used to quantify the phase coupling between two nonlinear signals. It has a range from 0 to 1, where a value of 0 indicates no phase coupling and a value of 1 indicates complete phase locking. In this study, we computed sensor-level PLV between all electrode pairs using the <italic>ft_connectivityanalysis</italic> function from the FieldTrip toolbox (<xref ref-type="bibr" rid="ref42">Oostenveld et al., 2011</xref>), generating five 64&#x202F;&#x00D7;&#x202F;64 connectivity matrices (one matrix per band). To assess their significance, we employed the Network-Based Statistic toolbox to perform permutation testing on the connectivity matrices (<xref ref-type="bibr" rid="ref67">Zalesky et al., 2010</xref>). This approach allowed us to control for family-wise error rates while identifying significant network components. We also extracted community connectivity within and across distinct brain regions and performed repeated one-way analysis of variance (ANOVA) to assess the significance of connectivity patterns among predefined brain communities.</p>
</sec>
<sec id="sec12">
<label>2.4.2.4</label>
<title>Neural-behavioral synchronization: mutual information</title>
<p>Mutual information (MI) between EEG power amplitude and force error was computed to quantify the neural-behavioral synchronization. MI (<italic>X</italic>, <italic>Y</italic>) indicates both linear and nonlinear statistical dependencies between two variables <italic>X</italic> and <italic>Y</italic>, which can be computed as <xref ref-type="disp-formula" rid="EQ3">Equation 3</xref>:</p>
<disp-formula id="EQ3">
<label>(3)</label>
<mml:math id="M3">
<mml:mi mathvariant="italic">MI</mml:mi>
<mml:mo stretchy="true">(</mml:mo>
<mml:mi>X</mml:mi>
<mml:mo>,</mml:mo>
<mml:mi>Y</mml:mi>
<mml:mo stretchy="true">)</mml:mo>
<mml:mo>=</mml:mo>
<mml:mi>H</mml:mi>
<mml:mo stretchy="true">(</mml:mo>
<mml:mi>X</mml:mi>
<mml:mo stretchy="true">)</mml:mo>
<mml:mo>+</mml:mo>
<mml:mi>H</mml:mi>
<mml:mo stretchy="true">(</mml:mo>
<mml:mi>Y</mml:mi>
<mml:mo stretchy="true">)</mml:mo>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>H</mml:mi>
<mml:mo stretchy="true">(</mml:mo>
<mml:mi>X</mml:mi>
<mml:mo>,</mml:mo>
<mml:mi>Y</mml:mi>
<mml:mo stretchy="true">)</mml:mo>
</mml:math>
</disp-formula>
<p>where <italic>H</italic>(<italic>X</italic>) and <italic>H</italic>(<italic>Y</italic>) denote the a priori uncertainty of <italic>X</italic> and <italic>Y</italic>, respectively. <italic>H</italic>(<italic>X</italic>, <italic>Y</italic>) denotes the a posteriori uncertainty about <italic>X</italic> when the measurement of <italic>Y</italic> is given. The value of MI(<italic>X</italic>, <italic>Y</italic>) answers the question: &#x201C;Given a measure of <italic>Y</italic>, how many bits of information about <italic>X</italic> can be predicted on average?&#x201D; (<xref ref-type="bibr" rid="ref24">John et al., 2018</xref>; <xref ref-type="bibr" rid="ref31">Liang et al., 2022</xref>). In this study, we computed MI between the within-trial force error (<italic>BE</italic><sub>NL</sub>) and the corresponding EEG power amplitude (obtained using the short-time Fourier transform) for each electrode. For each subject, the MI values formed a multidimensional array with dimensions: frequency point &#x00D7; electrode &#x00D7; trial.</p>
</sec>
</sec>
</sec>
<sec id="sec13">
<label>2.5</label>
<title>Features selection</title>
<p>All the behavioral and EEG features were first compared between conditions. To identify significant effects, we employed appropriate statistical tests (<italic>t</italic>-tests or cluster-based permutation testing), and only these significant features were retained for subsequent classification. Specifically, for the behavioral force errors, data were first averaged across trials for each condition within subjects, and then paired-sample <italic>t</italic>-tests were performed to evaluate condition-related differences. For EEG features (i.e., spectral power, alpha-theta ratios, PLV, and MI), cluster-based permutation testing was adopted to evaluate condition-related differences. This nonparametric statistical approach controls the family-wise type I error rate that arises from multiple comparisons across electrodes and frequency bins by employing Monte Carlo randomization. Briefly, the data were shuffled (1,000 permutations) to estimate a null distribution of effect sizes based on cluster-level statistics&#x2014;specifically, the sum of <italic>t</italic>-values with the same sign across adjacent electrodes, frequencies, or ratios. The cluster-corrected <italic>p</italic>-value was defined as the proportion of permuted datasets in which the cluster-level statistic exceeded that of the original data (cluster-defining threshold: <italic>p</italic>&#x202F;&#x003C;&#x202F;0.05). Statistically significant features and electrodes were then selected as the final feature subset and used as inputs for the classifier. We chose cluster-based permutation testing because this nonparametric method can effectively control false positives under multiple comparison corrections while accounting for the spatial and spectral contiguity of EEG data (<xref ref-type="bibr" rid="ref36">Maris and Oostenveld, 2007</xref>). 
This approach yields interpretable clusters of features rather than isolated points, which is particularly appropriate for EEG and connectivity analyses (<xref ref-type="bibr" rid="ref45">Pernet et al., 2015</xref>).</p>
</sec>
<sec id="sec14">
<label>2.6</label>
<title>Classifier training and validation</title>
<p>Using the identified features as inputs and the attentional state labels (onT or MW) as outputs, we trained the classifiers using the support vector machine (SVM) algorithm to decode attentional states for each trial. Although previous studies have employed various machine learning algorithms, such as decision trees (<xref ref-type="bibr" rid="ref61">Tasika et al., 2020</xref>), random forests (<xref ref-type="bibr" rid="ref3">Chen et al., 2022</xref>), and artificial neural networks (<xref ref-type="bibr" rid="ref20">Hosseini and Guo, 2019</xref>), this study applied SVM due to its suitability for small-sized datasets with low-dimensional data. Moreover, most prior studies on attention decoding have reported superior classification performance using SVM (<xref ref-type="bibr" rid="ref22">Jin et al., 2019</xref>, <xref ref-type="bibr" rid="ref23">2020</xref>; <xref ref-type="bibr" rid="ref12">Dong et al., 2021</xref>; <xref ref-type="bibr" rid="ref25">Jothiral et al., 2025</xref>). A radial basis function (RBF) was selected as the kernel function, and the default parameter settings in LIBSVM were applied (i.e., penalty parameter C&#x202F;=&#x202F;1 and kernel parameter <italic>&#x03B3;</italic>&#x202F;=&#x202F;1/feature dimension). To ensure comparability across features, all input features were z-score standardized within each training fold before model fitting, and the same transformation was applied to the corresponding test fold.</p>
<p>Two cross-validation strategies were employed to evaluate the trained models:</p>
<list list-type="simple">
<list-item>
<p>(1) Leave-one-subject-out (LOSO) cross-validation for cross-participant evaluation: in each iteration, one participant&#x2019;s data were used as the test dataset, while data from the remaining <italic>N</italic>-1 participants formed the training dataset. This process was repeated <italic>N</italic> times, where <italic>N</italic> denotes the number of participants included in the analysis (<italic>N&#x202F;=</italic> 9). The mean classification performance was then averaged across all iterations.</p>
</list-item>
<list-item>
<p>(2) Five-fold cross-validation for within-participant evaluation: for each participant, trials were randomly divided into five subsets while preserving class balance (i.e., each fold contained equal numbers of onT and MW trials). In each round, one subset served as the test dataset, while the remaining four subsets were used for training. The average classification performance was computed across all folds and all participants.</p>
</list-item>
</list>
<p>Finally, we computed five commonly used metrics&#x2014;accuracy, recall, precision, F1-score, and the area under the receiver operating characteristic curve (AUC)&#x2014;to comprehensively evaluate the effectiveness of the selected features in classifying attentional states. The classification performance for various feature combinations was also assessed using these metrics.</p>
</sec>
</sec>
<sec sec-type="results" id="sec15">
<label>3</label>
<title>Results</title>
<sec id="sec16">
<label>3.1</label>
<title>Self-reported ratings and behavioral performance</title>
<p>To assess whether the off-task ratings reported by participants increased over time during the force control task, we performed a linear regression analysis on the z-scored ratings across trials for each participant. <xref ref-type="fig" rid="fig3">Figure 3a</xref> illustrates the rating scores of a representative participant across all trials (blue dots), along with the corresponding regression-fitted curve (red line). <xref ref-type="fig" rid="fig3">Figure 3b</xref> presents the group-averaged slope values of the regression-fitted curves. A one-tailed one-sample <italic>t</italic>-test confirmed that the slope was significantly greater than zero [<italic>t</italic>(8)&#x202F;=&#x202F;2.53, <italic>p</italic>&#x202F;=&#x202F;0.018], indicating a significant upward trend in self-reported off-task ratings over time. For the trial-level force error (i.e., <italic>BE</italic><sub>NL-trial</sub>), we performed a similar linear regression on the z-scored values across trials for each participant. <xref ref-type="fig" rid="fig3">Figure 3c</xref> shows the <italic>BE</italic><sub>NL-trial</sub> values of a representative participant with the corresponding regression-fitted curve, and <xref ref-type="fig" rid="fig3">Figure 3d</xref> summarizes the slope values for all participants. A one-tailed one-sample <italic>t</italic>-test on these slope values revealed a significant increasing trend in force error over trials (<italic>t</italic>(8)&#x202F;=&#x202F;3.16, <italic>p</italic>&#x202F;=&#x202F;0.007). The <italic>z</italic>-scored MW ratings and <italic>BE</italic><sub>NL-trial</sub> data for all participants are provided in <xref rid="SM1" ref-type="supplementary-material">Supplementary Figures 1, 2</xref>, respectively.</p>
<fig position="float" id="fig3">
<label>Figure 3</label>
<caption>
<p>Behavioral measures of attentional states. <bold>(a)</bold> <italic>Z</italic>-scored off-task ratings (blue dots) and linear regression fit (red line) for a single representative participant. <bold>(b)</bold> Group-level slope coefficients derived from the linear regression of z-scored off-task ratings. <bold>(c)</bold> <italic>Z</italic>-scored trial-level force error (<italic>BE</italic><sub>NL-trial</sub>; blue dots) and linear regression fit (red line) for the representative participant. <bold>(d)</bold> Group-level slope coefficients for z-scored <italic>BE</italic><sub>NL-trial</sub>. <bold>(e)</bold> Trial classification based on rating quartiles (onT: green; MW: yellow). <bold>(f)</bold> Comparison of force error between onT and MW conditions.</p>
</caption>
<graphic xlink:href="fnins-19-1654827-g003.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Scatter and box plots analyze cognitive task data. Graph a (scatter plot) shows a slight positive correlation (r = 0.042) between trials and z-scored off-task ratings for a single subject. Graph b (box plot) presents distribution of z-scored rating slopes across subjects. Graph c (scatter plot) illustrates a similar trend (r = 0.048) for z-scored BE_NL_trial. Graph d (box plot) shows distribution of slope of z-scored BE_NL_trial across subjects. Graph e (3D scatter plot) visualizes ratings against BE_NL_trial and trials, distinguishing on-task (onT) and mind-wandering (MW). Graph f (box plot) compares BE_NL_trial between onT and MW tasks across subjects.</alt-text>
</graphic>
</fig>
<p>During the experiment, seven participants completed 36 trials and the other two participants completed 48 trials. As shown in <xref ref-type="fig" rid="fig3">Figure 3e</xref>, trials were classified based on rating quartiles: the trials in the lowest quartile (&#x2264;25%) were labeled as onT condition, and those in the highest quartile (&#x2265;75%) as MW condition. Consequently, two participants contributed 12 trials per condition, while the remaining seven participants contributed 9 trials per condition. Group-averaged force errors (<italic>BE</italic><sub>NL-trial</sub>) for the two conditions are presented in <xref ref-type="fig" rid="fig3">Figure 3f</xref>. Paired-sample <italic>t</italic>-tests revealed significantly higher force errors during the MW condition (0.24&#x202F;&#x00B1;&#x202F;0.07) compared to the onT condition [0.17&#x202F;&#x00B1;&#x202F;0.03; <italic>t</italic>(8)&#x202F;=&#x202F;3.44, <italic>p</italic>&#x202F;=&#x202F;0.004].</p>
</sec>
<sec id="sec17">
<label>3.2</label>
<title>Condition-specific differences in spectral power</title>
<p>For the trials classified as onT and MW, power spectra were estimated at each electrode for every frequency point between 2 and 45&#x202F;Hz in a step of 0.1&#x202F;Hz. Paired-sample <italic>t</italic>-tests were conducted to assess the power differences between conditions, with t-values (onT minus MW) visualized as spatial-frequency topographies (<xref ref-type="fig" rid="fig4">Figure 4a</xref>). Cluster-based permutation tests were then applied, and <italic>t</italic>-values that survived the significance threshold (<italic>p</italic>-cluster &#x003C;0.025) are shown in the lower panel of <xref ref-type="fig" rid="fig4">Figure 4a</xref>. The analysis revealed two significant clusters in the low alpha band (8&#x2013;10&#x202F;Hz). Cluster 1 was primarily distributed over the frontal region, encompassing electrodes FPz, Fz, FCz, FP1, AF7, AF3, F1, F3, F5, F7, FC1, FC3, FC5, FT7, FP2, AF8, AF4, F2, F4, F6, F8, FC2, FC4, and FC6. Cluster 2 was mainly distributed over the posterior region, involving electrodes POz, Oz, Pz, TP7, P7, P5, P3, PO7, PO5, PO3, TP8, CP6, P4, P6, P8, PO4, PO6, and PO8. Additionally, cluster-averaged power within the 8&#x2013;10&#x202F;Hz band was compared between the two conditions. As shown in <xref ref-type="fig" rid="fig4">Figure 4b</xref>, the cluster-averaged power during the MW condition was significantly higher than during the onT condition in both the frontal region (MW&#x202F;=&#x202F;0.142&#x202F;&#x00B1;&#x202F;0.079, onT&#x202F;=&#x202F;0.068&#x202F;&#x00B1;&#x202F;0.029; <italic>p</italic>&#x202F;&#x003C;&#x202F;0.001) and the posterior region (MW&#x202F;=&#x202F;0.170&#x202F;&#x00B1;&#x202F;0.089, onT&#x202F;=&#x202F;0.074&#x202F;&#x00B1;&#x202F;0.039; <italic>p</italic>&#x202F;&#x003C;&#x202F;0.001).</p>
<fig position="float" id="fig4">
<label>Figure 4</label>
<caption>
<p>Power differences between onT and MW conditions. <bold>(a)</bold> The top panel presents the t-value maps from paired-sample <italic>t</italic>-tests (onT minus MW) for each frequency point and each electrode. The bottom panel represents two significant clusters (<italic>p</italic>-cluster &#x003C;0.025) at 8-10&#x202F;Hz. <bold>(b)</bold> The left panels present scalp topographies of 8-10&#x202F;Hz <italic>t</italic>-values. Electrodes that survived from the permutation test were marked with black stars. The right panels show cluster-averaged power within 8&#x2013;10&#x202F;Hz under onT and MW conditions. &#x002A;&#x002A;<italic>p</italic>&#x202F;&#x003C;&#x202F;0.001.</p>
</caption>
<graphic xlink:href="fnins-19-1654827-g004.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">The image consists of three panels: (a) A heatmap showing t-values across various brain regions and frequencies, indicating power differences between mind-wandering (MW) and on-task (onT) states. (b) Two topographical brain maps displaying significant clusters at 8-10 Hz for MW versus onT conditions, highlighting increased activity. The right contains box plots comparing averaged power for onT and MW states, with MW showing higher power in both clusters, marked by asterisks indicating statistical significance.</alt-text>
</graphic>
</fig>
<p>To further localize the cortical sources associated with attentional fluctuations during the force control task, we conducted source localization analysis using a beamformer algorithm implemented in FieldTrip (<xref ref-type="bibr" rid="ref42">Oostenveld et al., 2011</xref>). For each participant, source activity estimates were obtained for both the onT and MW conditions within the 8&#x2013;10&#x202F;Hz band. Paired-sample <italic>t</italic>-tests were used to assess differences in neural activity between the two conditions, followed by cluster-based permutation testing (<italic>p</italic>-cluster &#x003C;0.025). As shown in <xref ref-type="fig" rid="fig5">Figure 5</xref>, the average power in the left middle occipital gyrus (MNI: [&#x2212;30, &#x2212;70, 12]) was significantly higher during the MW condition (3.51&#x202F;&#x00B1;&#x202F;0.39&#x202F;dB) than during the onT condition [3.36&#x202F;&#x00B1;&#x202F;0.40&#x202F;dB; <italic>t</italic>(8)&#x202F;=&#x202F;4.99, <italic>p</italic>&#x202F;=&#x202F;0.0084].</p>
<fig position="float" id="fig5">
<label>Figure 5</label>
<caption>
<p>Source localization results. Beamformer analysis showing increased 8&#x2013;10&#x202F;Hz power during MW relative to onT in the left middle occipital gyrus (MNI [&#x2212;30, &#x2212;70, 12]).</p>
</caption>
<graphic xlink:href="fnins-19-1654827-g005.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Brain scans with highlighted regions showing significant activation in red. The color bar indicates t-values from negative five in red to positive five in blue. Part of the occipital lobe is labeled "Occipital Mid L" with a p-value cluster less than 0.025.</alt-text>
</graphic>
</fig>
</sec>
<sec id="sec18">
<label>3.3</label>
<title>Condition-specific differences in alpha-theta ratios</title>
<p>Synchronization between neural oscillations at different frequencies has been proposed as a core mechanism for the coordination and integration of neural systems. Mathematically, when two oscillators with different frequencies form a harmonic relationship (e.g., <italic>f</italic><sub>1</sub>/<italic>f</italic><sub>2</sub>&#x202F;=&#x202F;2), as opposed to a nonharmonic relationship (e.g., <italic>f</italic><sub>1</sub>/<italic>f</italic><sub>2</sub>&#x202F;=&#x202F;1.6), the harmonic arrangement allows for more frequent excitatory phase meetings, thereby facilitating cross-frequency synchronization. In line with this principle, recent theoretical frameworks suggest that shifts in oscillatory peak frequencies constitute a principal mechanism for implementing cross-frequency coupling and decoupling in the brain (<xref ref-type="bibr" rid="ref48">Rodriguez-Larios and Alaerts, 2019</xref>, <xref ref-type="bibr" rid="ref49">2021</xref>; <xref ref-type="bibr" rid="ref50">Rodriguez-Larios et al., 2020</xref>). Following this framework, we quantified cross-frequency coupling by analyzing peak frequency ratios between different bands and compared them between onT and MW conditions during the force control task. Given the established role of alpha-theta coupling in tasks involving attention and executive control, we specifically examined peak frequency ratios between the alpha and theta bands. <xref ref-type="fig" rid="fig6">Figure 6a</xref> illustrates trial-wise variability in alpha and theta peak frequencies, as well as their corresponding numerical ratios over a 3-s period for a representative participant and electrode. <xref ref-type="fig" rid="fig6">Figure 6b</xref> compares the spectral power in two representative trials: one showing a harmonic alpha-theta ratio (2.04) and the other a non-harmonic ratio (1.62). 
<xref ref-type="fig" rid="fig6">Figure 6c</xref> shows the distribution of alpha and theta peak frequencies across all trials for the same participant and electrode.</p>
<fig position="float" id="fig6">
<label>Figure 6</label>
<caption>
<p>Peak frequency detection of alpha and theta bands and ratio calculation. <bold>(a)</bold> Temporal variability of peak frequencies and ratios for an exemplary participant and electrode. <bold>(b)</bold> Spectral power in two exemplary trials: alpha and theta peak frequencies formed a harmonic (ratio&#x202F;=&#x202F;2.04) versus a non-harmonic (ratio&#x202F;=&#x202F;1.62) relationship. <bold>(c)</bold> Distribution of peak frequencies across trials.</p>
</caption>
<graphic xlink:href="fnins-19-1654827-g006.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Graphs illustrating theta and alpha peak frequencies. Panel a shows fluctuating theta and alpha peaks over time, with a frequency ratio graph. Panel b compares power spectra, indicating alpha/theta peak ratios. Panel c presents frequency occurrence histograms, distinguishing theta and alpha frequencies.</alt-text>
</graphic>
</fig>
<p>Differences between onT and MW conditions were further assessed using paired-sample <italic>t</italic>-tests for each electrode and cross-frequency ratio (ranging from 1.1 to 3.3 in a step of 0.1). Cluster-based permutation testing was applied to identify significant clusters based on adjacency in electrode space and cross-frequency ratio, while controlling for multiple comparisons. As described in Section 2.4.2.2, the distribution of alpha-theta ratios was represented by its probability density, where higher density at a given ratio reflects a greater likelihood of oscillatory alignment around that harmonic relationship. <xref ref-type="fig" rid="fig7">Figure 7a</xref> shows the probability density of each ratio averaged across all electrodes for the onT and MW conditions. <xref ref-type="fig" rid="fig7">Figure 7b</xref> visualizes the condition differences in probability density by plotting t-values for each ratio and electrode. Positive <italic>t</italic>-values (shown in cold colors) indicate higher probability density during the onT condition compared to the MW condition. A negative cluster (i.e., MW&#x202F;&#x003E;&#x202F;onT) was observed at posterior electrodes within the lower ratio range (1.2&#x2013;1.7), although it did not reach significance in the permutation test. However, a significant positive cluster was identified within the 2.6&#x2013;3.0 ratio range, indicating a significantly higher probability density during the onT condition compared to the MW condition (<italic>p</italic>-cluster&#x202F;&#x003C;&#x202F;0.05). <xref ref-type="fig" rid="fig7">Figure 7c</xref> presents the topographical heat map of t-values averaged over the 2.6&#x2013;3.0 ratio range, with PO3 and PO4 marked as significant electrodes from the permutation test. 
A repeated-measures one-way ANOVA on the identified cluster confirmed that the cluster-averaged probability density within the 2.6&#x2013;3.0 ratio range was significantly higher during the onT condition (0.299&#x202F;&#x00B1;&#x202F;0.066) than during the MW condition [0.226&#x202F;&#x00B1;&#x202F;0.065; <italic>F</italic><sub>(1,92)</sub>&#x202F;=&#x202F;8.21, <italic>p</italic>&#x202F;=&#x202F;0.0005; <xref ref-type="fig" rid="fig7">Figure 7d</xref>].</p>
<fig position="float" id="fig7">
<label>Figure 7</label>
<caption>
<p>Condition-specific differences in alpha-theta ratios. <bold>(a)</bold> Probability density of each alpha-theta ratio (from 1.1 to 3.3 with a step of 0.1) averaged across electrodes and subjects. Error bars represent standard deviation across subjects. <bold>(b)</bold> <italic>t</italic>-value map of condition differences (onT minus MW). Positive <italic>t</italic>-values shown in cold colors indicate higher probability density under onT condition compared to MW condition. <bold>(c)</bold> Topographical heat map of t-values averaged over 2.6&#x2013;3.0 ratios. Two electrodes (PO3 and PO4) showing significance in the cluster-based permutation test are marked by yellow stars. <bold>(d)</bold> Cluster-averaged probability density under onT and MW conditions.</p>
</caption>
<graphic xlink:href="fnins-19-1654827-g007.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Panel a displays a probability density graph comparing onT and MW states with overlapping data points. Panel b is a heat map showing alpha-theta ratios across various brain regions, indicating T values. Panel c is a scalp map visualizing T values through color gradients on the head surface. Panel d shows box plots of cluster-averaged probability density for onT and MW, with data points illustrating distribution and variability. Each panel contributes to analyzing brain activity differences between onT and MW states.</alt-text>
</graphic>
</fig>
</sec>
<sec id="sec19">
<label>3.4</label>
<title>Condition-specific differences in functional connectivity</title>
<p>Given the significant condition-related differences observed in the 8&#x2013;10&#x202F;Hz band (Section 3.2), we analyzed PLVs between all electrode pairs to investigate functional connectivity differences within this frequency range. Paired-sample <italic>t</italic>-tests with permutation-based correction revealed no significant differences for the onT&#x202F;&#x003E;&#x202F;MW comparison, but significantly higher connectivity during MW compared to onT at specific connections. The statistically significant PLV values and their corresponding electrode pairs are shown in <xref ref-type="fig" rid="fig8">Figure 8a</xref>.</p>
<fig position="float" id="fig8">
<label>Figure 8</label>
<caption>
<p>Functional connectivity differences between onT and MW conditions. <bold>(a)</bold> The left panel illustrates the differences in PLV between the onT and MW conditions by plotting paired-sample <italic>t</italic>-test values. The right panel highlights the connections showing significant increases during MW compared to onT. <bold>(b)</bold> Four electrode categories used for the community connection analysis: F (frontal lobe), PO (parieto-occipital lobe), LC (left central motor area), and RC (right central motor area).</p>
</caption>
<graphic xlink:href="fnins-19-1654827-g008.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">EEG connectivity analysis featuring three panels: (a) a heatmap with pink squares indicating T-values across different EEG channels, (b) a circular diagram showing EEG electrode positions with lines representing connections in varying pink shades, and (c) a head map indicating frontal (F), left central (LC), right central (RC), and parietal occipital (PO) regions.</alt-text>
</graphic>
</fig>
<p>To investigate condition-related connectivity differences from a network-level perspective, electrodes were grouped into four brain regions (<xref ref-type="fig" rid="fig8">Figure 8b</xref>): frontal lobe (F), parieto-occipital lobe (PO), left central motor area (LC), and right central motor area (RC). Community connectivity (CC) was computed by averaging PLVs within or between these defined regions. A repeated-measures one-way ANOVA revealed significantly higher intra-region CC within the LC during the MW condition compared to the onT condition [<italic>F</italic><sub>(1,100)</sub>&#x202F;=&#x202F;19.76, <italic>p</italic>&#x202F;&#x003C;&#x202F;0.001]. Inter-region CC between PO-RC [<italic>F</italic><sub>(1,100)</sub>&#x202F;=&#x202F;9.36, <italic>p</italic>&#x202F;=&#x202F;0.003], F-LC [<italic>F</italic><sub>(1,100)</sub>&#x202F;=&#x202F;6.84, <italic>p</italic>&#x202F;=&#x202F;0.010], and LC-RC [<italic>F</italic><sub>(1,100)</sub>&#x202F;=&#x202F;6.85, <italic>p</italic>&#x202F;=&#x202F;0.010] was also significantly higher during the MW condition than during the onT condition. Descriptive statistics and corresponding significance values are summarized in <xref ref-type="table" rid="tab1">Table 1</xref>.</p>
<table-wrap position="float" id="tab1">
<label>Table 1</label>
<caption>
<p>Descriptive and statistics results of community connection analysis.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th>Brain regions</th>
<th align="center" valign="top">CC-onT<break/>(&#x00B1;SD)</th>
<th align="center" valign="top">CC-MW<break/>(&#x00B1;SD)</th>
<th align="center" valign="top"><italic>F</italic>-values</th>
<th align="center" valign="top"><italic>p</italic>-values</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="middle">F</td>
<td align="center" valign="middle">0.681&#x202F;&#x00B1;&#x202F;0.035</td>
<td align="center" valign="middle">0.675&#x202F;&#x00B1;&#x202F;0.083</td>
<td align="center" valign="middle">0.015</td>
<td align="center" valign="middle">0.904</td>
</tr>
<tr>
<td align="left" valign="middle">LC</td>
<td align="center" valign="middle">0.617&#x202F;&#x00B1;&#x202F;0.053</td>
<td align="center" valign="middle">0.668&#x202F;&#x00B1;&#x202F;0.077</td>
<td align="center" valign="middle">19.761</td>
<td align="center" valign="middle">2.28e-5&#x002A;&#x002A;&#x002A;</td>
</tr>
<tr>
<td align="left" valign="middle">RC</td>
<td align="center" valign="middle">0.606&#x202F;&#x00B1;&#x202F;0.055</td>
<td align="center" valign="middle">0.615&#x202F;&#x00B1;&#x202F;0.107</td>
<td align="center" valign="middle">0.120</td>
<td align="center" valign="middle">0.729</td>
</tr>
<tr>
<td align="left" valign="middle">PO</td>
<td align="center" valign="middle">0.656&#x202F;&#x00B1;&#x202F;0.052</td>
<td align="center" valign="middle">0.643&#x202F;&#x00B1;&#x202F;0.063</td>
<td align="center" valign="middle">2.817</td>
<td align="center" valign="middle">0.096</td>
</tr>
<tr>
<td align="left" valign="middle">F-LC</td>
<td align="center" valign="middle">0.537&#x202F;&#x00B1;&#x202F;0.035</td>
<td align="center" valign="middle">0.566&#x202F;&#x00B1;&#x202F;0.087</td>
<td align="center" valign="middle">6.843</td>
<td align="center" valign="middle">0.010&#x002A;</td>
</tr>
<tr>
<td align="left" valign="middle">F-RC</td>
<td align="center" valign="middle">0.542&#x202F;&#x00B1;&#x202F;0.035</td>
<td align="center" valign="middle">0.559&#x202F;&#x00B1;&#x202F;0.087</td>
<td align="center" valign="middle">1.928</td>
<td align="center" valign="middle">0.168</td>
</tr>
<tr>
<td align="left" valign="middle">F-PO</td>
<td align="center" valign="middle">0.605&#x202F;&#x00B1;&#x202F;0.037</td>
<td align="center" valign="middle">0.610&#x202F;&#x00B1;&#x202F;0.063</td>
<td align="center" valign="middle">0.138</td>
<td align="center" valign="middle">0.711</td>
</tr>
<tr>
<td align="left" valign="middle">LC-RC</td>
<td align="center" valign="middle">0.541&#x202F;&#x00B1;&#x202F;0.051</td>
<td align="center" valign="middle">0.570&#x202F;&#x00B1;&#x202F;0.086</td>
<td align="center" valign="middle">6.848</td>
<td align="center" valign="middle">0.010&#x002A;</td>
</tr>
<tr>
<td align="left" valign="middle">LC-PO</td>
<td align="center" valign="middle">0.520&#x202F;&#x00B1;&#x202F;0.031</td>
<td align="center" valign="middle">0.536&#x202F;&#x00B1;&#x202F;0.075</td>
<td align="center" valign="middle">0.858</td>
<td align="center" valign="middle">0.357</td>
</tr>
<tr>
<td align="left" valign="middle">RC-PO</td>
<td align="center" valign="middle">0.524&#x202F;&#x00B1;&#x202F;0.029</td>
<td align="center" valign="middle">0.565&#x202F;&#x00B1;&#x202F;0.073</td>
<td align="center" valign="middle">9.362</td>
<td align="center" valign="middle">0.003&#x002A;&#x002A;</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p>&#x002A;<italic>p</italic>&#x202F;&#x003C;&#x202F;0.05, &#x002A;&#x002A;<italic>p</italic>&#x202F;&#x003C;&#x202F;0.01, &#x002A;&#x002A;&#x002A;<italic>p</italic>&#x202F;&#x003C;&#x202F;0.001.</p>
</table-wrap-foot>
</table-wrap>
</sec>
<sec id="sec20">
<label>3.5</label>
<title>Condition-specific differences in neural-behavioral synchronization</title>
<p>Mutual information (MI) between the within-trial force error (<italic>BE</italic><sub>NL</sub>) and EEG power amplitude was analyzed to assess neural-behavioral synchronization. <xref ref-type="fig" rid="fig9">Figure 9a</xref> presents paired-sample <italic>t</italic>-test values for each electrode and frequency point (2&#x2013;45&#x202F;Hz in 0.1&#x202F;Hz steps), where positive <italic>t</italic>-values (shown in cold colors) indicate higher MI during the onT condition compared to the MW condition. Cluster-based permutation testing identified a significant negative cluster within the 7.2&#x2013;8.8&#x202F;Hz range (<italic>p</italic>-cluster&#x202F;&#x003C;&#x202F;0.05), indicating significantly stronger neural-behavioral synchronization during the onT condition compared to the MW condition. This negative cluster was located in the anterior region, including electrodes F1, F3, F5, F2, F4, and F6. <xref ref-type="fig" rid="fig9">Figure 9b</xref> shows the topographical <italic>t</italic>-value map averaged across these six electrodes within the 7.2&#x2013;8.8&#x202F;Hz range. A repeated-measures one-way ANOVA on the identified cluster further confirmed significantly higher MI during the onT condition (0.650&#x202F;&#x00B1;&#x202F;0.085) compared to the MW condition [0.530&#x202F;&#x00B1;&#x202F;0.054; <italic>F</italic><sub>(1,92)</sub>&#x202F;=&#x202F;10.69, <italic>p</italic>&#x202F;=&#x202F;0.0015; <xref ref-type="fig" rid="fig9">Figure 9c</xref>].</p>
<fig position="float" id="fig9">
<label>Figure 9</label>
<caption>
<p>Neural&#x2013;behavioral synchronization differences between onT and MW conditions. <bold>(a)</bold> The top panel displays the paired-sample <italic>t</italic>-test values per electrode and frequency point (2&#x2013;45&#x202F;Hz in 0.1&#x202F;Hz step). The bottom panel represents the cluster that survived from the cluster-based permutation test (<italic>p</italic>-value of cluster &#x003C;0.05). <bold>(b)</bold> Scalp topographical map of <italic>t</italic>-values averaged within 7.2&#x2013;8.8&#x202F;Hz, with electrodes that survived the cluster-based permutation test marked by black stars. <bold>(c)</bold> Cluster-averaged MI within 7.2&#x2013;8.8&#x202F;Hz under onT and MW conditions.</p>
</caption>
<graphic xlink:href="fnins-19-1654827-g009.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">An EEG analysis consisting of: (a) a T-value heatmap with electrodes labeled from Fpz to P1 and a frequency range of 4 to 45 Hz; (b) a topographic map showing T-values between 7.2 and 8.8 Hz with areas marked by stars; (c) a box plot comparing cluster-averaged mutual information between on-task (onT) and mind-wandering (MW) conditions.</alt-text>
</graphic>
</fig>
</sec>
<sec id="sec21">
<label>3.6</label>
<title>Attentional states classification results</title>
<p>An SVM-RBF model was trained to classify binary attentional states and evaluated using two cross-validation strategies (i.e., LOSO and 5-fold cross-validation). Each feature that passed the aforementioned statistical tests was evaluated individually in separate classification models. <xref ref-type="table" rid="tab2">Table 2</xref> summarizes the classification performance metrics for each individual feature under both validation strategies. The chance-level accuracy is 50% because the dataset was balanced using the quartile-based labeling approach. Here, MW was defined as the positive class. The single-feature models showed generally comparable classification performance across both validation strategies. Among the four features, spectral power yielded the highest accuracy, precision, F1-score, and AUC. Considering comparability with existing literature, this study primarily focuses on the accuracy and AUC metrics. For the cross-participant classification (i.e., LOSO strategy), the power feature achieved a mean accuracy of 66.99&#x202F;&#x00B1;&#x202F;12.34% and a mean AUC of 76.37&#x202F;&#x00B1;&#x202F;19.26%. Similarly, in the within-participant classification (i.e., 5-fold strategy), the power feature yielded a mean accuracy of 68.28&#x202F;&#x00B1;&#x202F;7.14% and a mean AUC of 71.86&#x202F;&#x00B1;&#x202F;7.05%. For the MI-only model, the recall for the MW class was high, whereas overall accuracy, precision, and AUC remained low. This indicates a bias toward predicting MW trials, leading to numerous false positives for onT trials, as illustrated by the confusion matrices shown in <xref rid="SM1" ref-type="supplementary-material">Supplementary Figure 3</xref>.</p>
<table-wrap position="float" id="tab2">
<label>Table 2</label>
<caption>
<p>Classification performance using individual features under LOSO and 5-fold cross-validation (Mean &#x00B1; SD).</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th>Metrics</th>
<th align="center" valign="top">Spectral power</th>
<th align="center" valign="top">Ratio</th>
<th align="center" valign="top">PLV</th>
<th align="center" valign="top">MI</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top" colspan="5">LOSO cross validation (%)</td>
</tr>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top"><bold>66.99&#x202F;&#x00B1;&#x202F;12.34</bold></td>
<td align="center" valign="top">54.07&#x202F;&#x00B1;&#x202F;12.89</td>
<td align="center" valign="top">54.26&#x202F;&#x00B1;&#x202F;14.32</td>
<td align="center" valign="top">50.42&#x202F;&#x00B1;&#x202F;7.72</td>
</tr>
<tr>
<td align="left" valign="top">Recall</td>
<td align="center" valign="top">80.00&#x202F;&#x00B1;&#x202F;27.44</td>
<td align="center" valign="top">33.52&#x202F;&#x00B1;&#x202F;22.46</td>
<td align="center" valign="top">41.39&#x202F;&#x00B1;&#x202F;30.47</td>
<td align="center" valign="top">94.54&#x202F;&#x00B1;&#x202F;8.41</td>
</tr>
<tr>
<td align="left" valign="top">Precision</td>
<td align="center" valign="top">64.48&#x202F;&#x00B1;&#x202F;11.18</td>
<td align="center" valign="top">57.41&#x202F;&#x00B1;&#x202F;30.17</td>
<td align="center" valign="top">53.70&#x202F;&#x00B1;&#x202F;28.29</td>
<td align="center" valign="top">50.25&#x202F;&#x00B1;&#x202F;5.43</td>
</tr>
<tr>
<td align="left" valign="top">F1-score</td>
<td align="center" valign="top">68.75&#x202F;&#x00B1;&#x202F;17.67</td>
<td align="center" valign="top">40.04&#x202F;&#x00B1;&#x202F;20.83</td>
<td align="center" valign="top">43.27&#x202F;&#x00B1;&#x202F;25.09</td>
<td align="center" valign="top">65.59&#x202F;&#x00B1;&#x202F;5.43</td>
</tr>
<tr>
<td align="left" valign="top">AUC</td>
<td align="center" valign="top"><bold>76.37&#x202F;&#x00B1;&#x202F;19.26</bold></td>
<td align="center" valign="top">61.65&#x202F;&#x00B1;&#x202F;14.90</td>
<td align="center" valign="top">51.83&#x202F;&#x00B1;&#x202F;22.61</td>
<td align="center" valign="top">45.47&#x202F;&#x00B1;&#x202F;14.93</td>
</tr>
<tr>
<td align="left" valign="top" colspan="5">5-fold cross validation (%)</td>
</tr>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top"><bold>68.28&#x202F;&#x00B1;&#x202F;7.14</bold></td>
<td align="center" valign="top">54.91&#x202F;&#x00B1;&#x202F;9.99</td>
<td align="center" valign="top">51.32&#x202F;&#x00B1;&#x202F;6.37</td>
<td align="center" valign="top">50.09&#x202F;&#x00B1;&#x202F;6.03</td>
</tr>
<tr>
<td align="left" valign="top">Recall</td>
<td align="center" valign="top">81.35&#x202F;&#x00B1;&#x202F;10.31</td>
<td align="center" valign="top">42.22&#x202F;&#x00B1;&#x202F;25.03</td>
<td align="center" valign="top">40.97&#x202F;&#x00B1;&#x202F;33.58</td>
<td align="center" valign="top">90.26&#x202F;&#x00B1;&#x202F;9.74</td>
</tr>
<tr>
<td align="left" valign="top">Precision</td>
<td align="center" valign="top">65.27&#x202F;&#x00B1;&#x202F;7.52</td>
<td align="center" valign="top">49.12&#x202F;&#x00B1;&#x202F;28.21</td>
<td align="center" valign="top">50.02&#x202F;&#x00B1;&#x202F;13.37</td>
<td align="center" valign="top">50.09&#x202F;&#x00B1;&#x202F;3.32</td>
</tr>
<tr>
<td align="left" valign="top">F1-score</td>
<td align="center" valign="top">71.92&#x202F;&#x00B1;&#x202F;5.64</td>
<td align="center" valign="top">44.88&#x202F;&#x00B1;&#x202F;25.38</td>
<td align="center" valign="top">40.39&#x202F;&#x00B1;&#x202F;21.46</td>
<td align="center" valign="top">64.32&#x202F;&#x00B1;&#x202F;4.64</td>
</tr>
<tr>
<td align="left" valign="top">AUC</td>
<td align="center" valign="top"><bold>71.86&#x202F;&#x00B1;&#x202F;7.05</bold></td>
<td align="center" valign="top">61.43&#x202F;&#x00B1;&#x202F;9.49</td>
<td align="center" valign="top">49.66&#x202F;&#x00B1;&#x202F;11.17</td>
<td align="center" valign="top">47.78&#x202F;&#x00B1;&#x202F;10.46</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p>Bold values indicate the best performance in terms of Accuracy and AUC.</p>
</table-wrap-foot>
</table-wrap>
<p>To evaluate whether synchronization features (i.e., ratio, PLV, and MI) could enhance the classification performance, we combined the spectral power feature with these synchronization features as inputs to the SVM-RBF models. <xref ref-type="table" rid="tab3">Table 3</xref> presents the classification results from all possible combinations of spectral power and synchronization-related features. Overall, combining spectral power with any of the synchronization features improved classification accuracy and AUC. Under LOSO cross-validation, the combination of spectral power, ratio, and PLV achieved the highest accuracy (71.57&#x202F;&#x00B1;&#x202F;10.79%) and AUC (79.87&#x202F;&#x00B1;&#x202F;15.59%). In 5-fold cross-validation, the combination of all four features yielded the highest accuracy (75.53&#x202F;&#x00B1;&#x202F;8.40%), while the combination of spectral power, ratio, and PLV achieved the highest AUC (76.58&#x202F;&#x00B1;&#x202F;7.07%).</p>
<table-wrap position="float" id="tab3">
<label>Table 3</label>
<caption>
<p>Classification performance using feature combinations under LOSO and 5-fold cross-validation (mean &#x00B1; SD).</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th>Metrics</th>
<th align="center" valign="top">Accuracy</th>
<th align="center" valign="top">Recall</th>
<th align="center" valign="top">Precision</th>
<th align="center" valign="top">F1-score</th>
<th align="center" valign="top">AUC</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top" colspan="6">LOSO cross validation (%)</td>
</tr>
<tr>
<td align="left" valign="top">Power + MI</td>
<td align="center" valign="top">69.21&#x202F;&#x00B1;&#x202F;14.55</td>
<td align="center" valign="top">66.39&#x202F;&#x00B1;&#x202F;31.00</td>
<td align="center" valign="top">68.58&#x202F;&#x00B1;&#x202F;30.96</td>
<td align="center" valign="top">64.55&#x202F;&#x00B1;&#x202F;25.87</td>
<td align="center" valign="top">77.51&#x202F;&#x00B1;&#x202F;11.34</td>
</tr>
<tr>
<td align="left" valign="top">Power + PLV</td>
<td align="center" valign="top">71.02&#x202F;&#x00B1;&#x202F;15.24</td>
<td align="center" valign="top">67.78&#x202F;&#x00B1;&#x202F;32.32</td>
<td align="center" valign="top">71.16&#x202F;&#x00B1;&#x202F;32.37</td>
<td align="center" valign="top">66.21&#x202F;&#x00B1;&#x202F;26.80</td>
<td align="center" valign="top">77.88&#x202F;&#x00B1;&#x202F;15.79</td>
</tr>
<tr>
<td align="left" valign="top">Power + Ratio</td>
<td align="center" valign="top">70.46&#x202F;&#x00B1;&#x202F;12.59</td>
<td align="center" valign="top">62.96&#x202F;&#x00B1;&#x202F;33.85</td>
<td align="center" valign="top">74.45&#x202F;&#x00B1;&#x202F;33.41</td>
<td align="center" valign="top">63.35&#x202F;&#x00B1;&#x202F;26.01</td>
<td align="center" valign="top">79.29&#x202F;&#x00B1;&#x202F;14.84</td>
</tr>
<tr>
<td align="left" valign="top">Power + Ratio + PLV</td>
<td align="center" valign="top"><bold>71.57&#x202F;&#x00B1;&#x202F;10.79</bold></td>
<td align="center" valign="top">67.04&#x202F;&#x00B1;&#x202F;28.89</td>
<td align="center" valign="top">83.34&#x202F;&#x00B1;&#x202F;18.39</td>
<td align="center" valign="top">67.70&#x202F;&#x00B1;&#x202F;17.13</td>
<td align="center" valign="top"><bold>79.87&#x202F;&#x00B1;&#x202F;15.59</bold></td>
</tr>
<tr>
<td align="left" valign="top">Power + Ratio + MI</td>
<td align="center" valign="top">68.10&#x202F;&#x00B1;&#x202F;13.36</td>
<td align="center" valign="top">67.04&#x202F;&#x00B1;&#x202F;28.89</td>
<td align="center" valign="top">79.87&#x202F;&#x00B1;&#x202F;21.71</td>
<td align="center" valign="top">65.67&#x202F;&#x00B1;&#x202F;16.94</td>
<td align="center" valign="top">76.63&#x202F;&#x00B1;&#x202F;16.32</td>
</tr>
<tr>
<td align="left" valign="top">Power + PLV&#x202F;+&#x202F;MI</td>
<td align="center" valign="top">67.82&#x202F;&#x00B1;&#x202F;16.42</td>
<td align="center" valign="top">68.24&#x202F;&#x00B1;&#x202F;30.39</td>
<td align="center" valign="top">66.85&#x202F;&#x00B1;&#x202F;31.36</td>
<td align="center" valign="top">64.64&#x202F;&#x00B1;&#x202F;25.85</td>
<td align="center" valign="top">77.44&#x202F;&#x00B1;&#x202F;13.03</td>
</tr>
<tr>
<td align="left" valign="top">Power + Ratio + PLV&#x202F;+&#x202F;MI</td>
<td align="center" valign="top">70.37&#x202F;&#x00B1;&#x202F;13.79</td>
<td align="center" valign="top">77.41&#x202F;&#x00B1;&#x202F;24.82</td>
<td align="center" valign="top">76.20&#x202F;&#x00B1;&#x202F;18.67</td>
<td align="center" valign="top">71.42&#x202F;&#x00B1;&#x202F;12.79</td>
<td align="center" valign="top">78.34&#x202F;&#x00B1;&#x202F;14.53</td>
</tr>
<tr>
<td align="left" valign="top" colspan="6">5-fold cross validation (%)</td>
</tr>
<tr>
<td align="left" valign="top">Power + MI</td>
<td align="center" valign="top">72.21&#x202F;&#x00B1;&#x202F;12.74</td>
<td align="center" valign="top">71.44&#x202F;&#x00B1;&#x202F;17.12</td>
<td align="center" valign="top">72.50&#x202F;&#x00B1;&#x202F;12.36</td>
<td align="center" valign="top">71.60&#x202F;&#x00B1;&#x202F;13.26</td>
<td align="center" valign="top">75.20&#x202F;&#x00B1;&#x202F;7.05</td>
</tr>
<tr>
<td align="left" valign="top">Power + PLV</td>
<td align="center" valign="top">71.21&#x202F;&#x00B1;&#x202F;12.16</td>
<td align="center" valign="top">71.44&#x202F;&#x00B1;&#x202F;17.12</td>
<td align="center" valign="top">70.56&#x202F;&#x00B1;&#x202F;9.94</td>
<td align="center" valign="top">70.79&#x202F;&#x00B1;&#x202F;12.90</td>
<td align="center" valign="top">74.56&#x202F;&#x00B1;&#x202F;8.24</td>
</tr>
<tr>
<td align="left" valign="top">Power + Ratio</td>
<td align="center" valign="top">73.43&#x202F;&#x00B1;&#x202F;10.34</td>
<td align="center" valign="top">70.97&#x202F;&#x00B1;&#x202F;11.06</td>
<td align="center" valign="top">75.29&#x202F;&#x00B1;&#x202F;11.41</td>
<td align="center" valign="top">72.79&#x202F;&#x00B1;&#x202F;9.81</td>
<td align="center" valign="top">76.42&#x202F;&#x00B1;&#x202F;7.16</td>
</tr>
<tr>
<td align="left" valign="top">Power + Ratio + PLV</td>
<td align="center" valign="top">71.43&#x202F;&#x00B1;&#x202F;8.31</td>
<td align="center" valign="top">75.20&#x202F;&#x00B1;&#x202F;11.27</td>
<td align="center" valign="top">69.97&#x202F;&#x00B1;&#x202F;7.46</td>
<td align="center" valign="top">72.31&#x202F;&#x00B1;&#x202F;8.38</td>
<td align="center" valign="top"><bold>76.58&#x202F;&#x00B1;&#x202F;7.07</bold></td>
</tr>
<tr>
<td align="left" valign="top">Power + Ratio + MI</td>
<td align="center" valign="top">71.09&#x202F;&#x00B1;&#x202F;7.19</td>
<td align="center" valign="top">73.83&#x202F;&#x00B1;&#x202F;13.18</td>
<td align="center" valign="top">70.60&#x202F;&#x00B1;&#x202F;6.76</td>
<td align="center" valign="top">71.55&#x202F;&#x00B1;&#x202F;7.64</td>
<td align="center" valign="top">75.07&#x202F;&#x00B1;&#x202F;6.26</td>
</tr>
<tr>
<td align="left" valign="top">Power + PLV&#x202F;+&#x202F;MI</td>
<td align="center" valign="top">71.97&#x202F;&#x00B1;&#x202F;11.17</td>
<td align="center" valign="top">76.51&#x202F;&#x00B1;&#x202F;14.24</td>
<td align="center" valign="top">69.88&#x202F;&#x00B1;&#x202F;8.77</td>
<td align="center" valign="top">72.94&#x202F;&#x00B1;&#x202F;11.03</td>
<td align="center" valign="top">74.39&#x202F;&#x00B1;&#x202F;7.59</td>
</tr>
<tr>
<td align="left" valign="top">Power + Ratio + PLV&#x202F;+&#x202F;MI</td>
<td align="center" valign="top"><bold>75.53&#x202F;&#x00B1;&#x202F;8.40</bold></td>
<td align="center" valign="top">80.50&#x202F;&#x00B1;&#x202F;5.43</td>
<td align="center" valign="top">70.87&#x202F;&#x00B1;&#x202F;7.32</td>
<td align="center" valign="top">75.23&#x202F;&#x00B1;&#x202F;5.62</td>
<td align="center" valign="top">76.12&#x202F;&#x00B1;&#x202F;5.81</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p>Bold values indicate the best performance in terms of Accuracy and AUC.</p>
</table-wrap-foot>
</table-wrap>
</sec>
</sec>
<sec sec-type="discussion" id="sec22">
<label>4</label>
<title>Discussion</title>
<p>This study aimed to advance the understanding of the neurophysiological signatures and detection of mind wandering (MW) by investigating EEG synchronization features within a novel visuo-haptic force control paradigm. Although MW has been extensively studied in visual and auditory domains, its neural correlates during sensorimotor engagement&#x2014;particularly involving force modulation&#x2014;remain largely unexplored. Moreover, despite the theoretical importance of neural synchronization in attention regulation, few studies have systematically assessed the efficacy of EEG-based functional connectivity (FC), cross-frequency coupling (CFC), and neural-behavioral synchronization (NBS) in detecting MW, especially in haptic contexts. Our investigation yielded key insights into the neural signatures of MW during force control and demonstrated the feasibility of using synchronization features for attentional state classification.</p>
<sec id="sec23">
<label>4.1</label>
<title>Neural signatures of mind wandering</title>
<p>The proposed continuous force control task effectively induced MW episodes, as evidenced by a significant upward trend in off-task ratings over trials and a marked degradation in behavioral performance (i.e., increased BE<sub>NL-trial</sub> errors) during the MW condition compared to the onT condition. In terms of neural activity, we observed increased alpha power (8&#x2013;10&#x202F;Hz) over frontal and parieto-occipital regions during MW, with source localization revealing the increased activity in the left middle occipital gyrus. This widespread alpha power increase during MW aligns with the &#x201C;perceptual decoupling&#x201D; hypothesis (<xref ref-type="bibr" rid="ref52">Schooler et al., 2011</xref>; <xref ref-type="bibr" rid="ref54">Smallwood, 2013</xref>) and numerous EEG studies using visual vigilance tasks such as the SART (<xref ref-type="bibr" rid="ref8">Compton et al., 2019</xref>; <xref ref-type="bibr" rid="ref22">Jin et al., 2019</xref>). Notably, our findings are consistent with recent observations by <xref ref-type="bibr" rid="ref35">Luna et al. (2023)</xref>, who reported increased alpha power in left occipital regions prior to missed targets compared to correct detections in a visual vigilance task, suggesting that alpha increases may reflect attentional disengagement across both visual and visuo-motor tasks. Regarding the haptic domain, while <xref ref-type="bibr" rid="ref44">Peng et al. (2021)</xref> reported increased frontal-central alpha associated with off-task states in a discrete force task, we observed a more distributed pattern involving both frontal and posterior areas. The localized increase in the left middle occipital gyrus indicates that even in a force-focused task, MW involves disengagement of visual processing regions, likely reflecting the visuo-haptic integration demands of our paradigm, since force adjustments in our task were guided by visual feedback. 
These findings highlight the modality-independent nature of alpha increases as a potential neural marker of attentional disengagement.</p>
<p>Beyond single-frequency analyses, we evaluated cross-frequency coupling and found significantly reduced probability density of high alpha-theta ratios (2.6&#x2013;3.0) under the MW state, particularly over parietal-occipital electrodes PO3 and PO4. While prior work by <xref ref-type="bibr" rid="ref48">Rodriguez-Larios and Alaerts (2019</xref>, <xref ref-type="bibr" rid="ref49">2021)</xref> and <xref ref-type="bibr" rid="ref50">Rodriguez-Larios et al. (2020)</xref> linked increased alpha-theta phase synchrony to MW during meditation, our focus on harmonic frequency ratios reveals a different aspect of cross-frequency organization. Harmonic ratios near 3.0 may support on-task attention by enabling stable cross-frequency phase coupling. This precise phase alignment likely optimizes communication between neural assemblies supporting top-down control (theta) and those involved in sensory inhibition (alpha), thereby facilitating efficient information integration. The reduction of these stable harmonic ratios during MW suggests a breakdown in this coordinated cross-frequency mechanism. This view aligns with theories proposing that optimal cognitive control relies on harmonic cross-frequency arrangements enabling effective communication between neural assemblies (<xref ref-type="bibr" rid="ref16">Fries, 2005</xref>; <xref ref-type="bibr" rid="ref43">Palva and Palva, 2017</xref>). The parietal-occipital localization (PO3/PO4) further implicates visuospatial processing networks in maintaining this rhythmic coordination during force tracking. Nevertheless, whether the near 3.0 ratios observed here also support focused attention in other motor tasks or in other modalities (such as pure visual or auditory tasks) remains to be determined by future studies. Direct cross-task comparisons and source-level CFC analyses would be required to assess the generalizability.</p>
<p>Additionally, MW was associated with enhanced functional connectivity across sensorimotor networks. Within the 8&#x2013;10&#x202F;Hz band, MW states exhibited significantly stronger phase locking within the left central motor area and between the PO-RC, F-LC, and LC-RC communities. One plausible interpretation is the compensatory recruitment of task-relevant sensorimotor assemblies when top-down control wanes; such localized synchronization could transiently support baseline performance despite attentional lapses. This finding extends previous fMRI research highlighting the dominance of the DMN during mind wandering by suggesting a context-dependent compensatory mechanism: in the force control task requiring continuous sensorimotor engagement, attenuated top-down attention may trigger localized synchronization within task-relevant networks to sustain baseline performance (<xref ref-type="bibr" rid="ref5">Christoff et al., 2009</xref>). This aligns with recent findings demonstrating a dynamically interdependent relationship between external (sensory and motor processing) and internal cognition (mind wandering) (<xref ref-type="bibr" rid="ref34">Long et al., 2025</xref>). In accordance with the recently proposed &#x201C;Baseline model of internal and external cognition&#x201D; (<xref ref-type="bibr" rid="ref41">Northoff et al., 2022</xref>), the observed hyperconnectivity likely reflects inefficient neural resource reallocation&#x2014;where heightened &#x201C;noisy&#x201D; processing in sensorimotor circuits fails to fully compensate for attentional lapses&#x2014;as evidenced by concurrent increases in behavioral errors. Our results underscore that MW dynamically redistributes neural resources with sensorimotor synchronization representing a signature of embodied attentional fluctuations. 
Nevertheless, alternative explanations for increased sensor-level connectivity&#x2014;such as contamination by muscle activity, volume conduction, field spread, or other recording artifacts&#x2014;cannot be ruled out (<xref ref-type="bibr" rid="ref19">Haufe et al., 2012</xref>). Importantly, our connectivity estimates were derived at the sensor level without full source-level leakage correction; therefore, these results should be interpreted with caution as preliminary evidence. Future studies should complement sensor-level FC with source reconstruction, leakage-robust metrics, and muscle activity monitoring to better distinguish neural coupling from confounds.</p>
<p>In addition to neural synchronization metrics, we also examined the coupling between neural activity and behavioral performance by assessing the mutual information between the force error (i.e., BE<sub>NL</sub>) and EEG power. During MW, the MI was significantly reduced within the 7.2&#x2013;8.8&#x202F;Hz band over several frontal electrodes, suggesting a breakdown in the real-time coupling between brain dynamics and motor output during attentional lapses. This observation is conceptually novel. Although previous studies have linked behavioral variability (e.g., RT variability) to MW (<xref ref-type="bibr" rid="ref14">Esterman et al., 2013</xref>; <xref ref-type="bibr" rid="ref44">Peng et al., 2021</xref>), none have quantified the dynamic synchronization between continuous neural signals and high-frequency motor performance. The 7.2&#x2013;8.8&#x202F;Hz band overlaps with the low-alpha/mu rhythm, known to reflect motor cortical excitability and somatosensory processing. Reduced NBS likely reflects a weakened predictive relationship between fluctuations in this rhythm and moment-to-moment force control accuracy during MW. This decoupling might provide an objective neurobehavioral signature of attentional disengagement specific to active motor tasks and represents an advance beyond static behavioral error measures.</p>
</sec>
<sec id="sec24">
<label>4.2</label>
<title>Classification performance</title>
<p>The SVM classification achieved optimal performance when traditional power features were combined with synchronization metrics. Within-participant models (5-fold CV) using all features reached 75.53% accuracy (AUC&#x202F;=&#x202F;76.12%), while cross-participant models (LOSO CV) using power + alpha-theta ratio + FC features achieved 71.57% accuracy (AUC&#x202F;=&#x202F;79.87%). These results are comparable to or slightly better than performance reported in similar binary classification studies (<xref ref-type="bibr" rid="ref22">Jin et al., 2019</xref>, <xref ref-type="bibr" rid="ref23">2020</xref>; <xref ref-type="bibr" rid="ref12">Dong et al., 2021</xref>; <xref ref-type="bibr" rid="ref3">Chen et al., 2022</xref>). Several factors likely contributed to the classification performance. First, the combination of commonly used features (i.e., power) and novel synchronization features (cross-frequency ratio, FC, NBS) offered complementary information. Notably, adding synchronization features consistently boosted performance over power features alone (e.g., LOSO AUC increased from 76.37 to 79.87% with power&#x202F;+&#x202F;ratio&#x202F;+&#x202F;FC), highlighting the value of capturing distributed network dynamics and brain-behavior interactions. Second, the proposed continuous force task provided a rich stream of behavioral data (1,000&#x202F;Hz) tightly synchronized with EEG data. This enabled the calculation of NBS, a feature unavailable in discrete response tasks that proved useful for classification. Third, the continuous, dynamic nature of the force control task likely elicited more pronounced and ecologically valid MW states compared to simpler vigilance tasks, leading to clearer neural dissociations.</p>
<p>However, direct comparisons are challenging due to differences in tasks, probing methods, and classification approaches (e.g., LOPOCV vs. LOSO vs. within-participant). Our cross-participant accuracy (71.57%) highlights the challenge of generalizing models across individuals, a common limitation in the field. Future work should explore more advanced normalization or domain adaptation techniques.</p>
</sec>
<sec id="sec25">
<label>4.3</label>
<title>Limitations and future work</title>
<p>While this study offers novel insights, the findings should be considered in light of several limitations. First, the final sample size of nine participants (after artifact rejection) is relatively small and all participants were male. Future studies should recruit larger, more diverse cohorts, including females and individuals from broader age ranges, to enhance generalizability and to explore potential sex differences in neural correlates of MW during motor tasks. Second, while synchronization features improved classification, their neurobiological interpretation remains complex. For instance, increased FC within motor areas during MW could reflect different processes (e.g., maladaptive noise, inefficient compensation, or muscle artifacts). Given the sensor-level nature of our connectivity analyses, we emphasize the need for source-space reconstructions and leakage-robust measures in future work to distinguish true inter-region interactions from sensor-level mixing. Future research should also integrate computational modeling or causal interventions (e.g., transcranial magnetic stimulation) to elucidate the functional significance of these connectivity patterns. Third, EEG epochs were extracted from the 3&#x202F;s preceding thought-probes, which occurred at 40&#x2013;50&#x202F;s intervals. This quasi-periodicity could have induced anticipatory effects or strategic attention re-engagement just before probes, potentially affecting the MW vs. onT contrast. However, the overall monotonic increase in MW reports across trials, coupled with the corresponding behavioral degradation, suggests that anticipatory effects alone are unlikely to fully account for our main findings. Nevertheless, future studies should employ randomized probe timing to eliminate this potential confound.</p>
<p>Finally, our findings pertain to a specific visuo-haptic force control task. The generalizability of the identified synchronization signatures to other haptic tasks or modalities remains to be investigated. Future studies should systematically compare MW signatures across different task types, such as pure haptic vs. visuo-haptic or force vs. texture discrimination. Future studies should also explore more sophisticated synchronization measures, such as directed connectivity (<xref ref-type="bibr" rid="ref59">Tafreshi et al., 2019</xref>), graph theory metrics (<xref ref-type="bibr" rid="ref39">Moon et al., 2020</xref>), and advanced machine learning algorithms that may better capture complex spatiotemporal dynamics in EEG data.</p>
</sec>
</sec>
<sec sec-type="conclusions" id="sec26">
<label>5</label>
<title>Conclusion</title>
<p>This study demonstrates that EEG synchronization features&#x2014;including functional connectivity within sensorimotor networks, alterations in alpha-theta cross-frequency coupling, and neural&#x2013;behavioral synchronization&#x2014;serve as sensitive and complementary markers of mind wandering during a continuous visuo-haptic force control task. Our findings extend the understanding of attentional fluctuations beyond visual/auditory paradigms, revealing task-specific neural dynamics characterized by increased motor network synchrony and disrupted neural-behavioral alignment during attentional lapses. Using these synchronization features, machine learning classifiers achieved 75.53% within-participant and 71.57% cross-participant accuracy when combined with spectral power features, confirming their feasibility and complementary value for decoding covert attentional states. This work extends the understanding of neural representations underlying attentional fluctuations during the continuous force control process. While our paradigm was constrained to a laboratory setting, the identified synchronization markers may provide preliminary insights to guide the future development of haptic-based attention training systems. Such systems could serve as a complementary approach to existing visual and auditory modalities in contexts such as sports training, surgical skill learning, or neurorehabilitation, pending further validation in more ecologically valid settings.</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="sec27">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec sec-type="ethics-statement" id="sec28">
<title>Ethics statement</title>
<p>The studies involving humans were approved by Beihang University, Beijing, China. The studies were conducted in accordance with the local legislation and institutional requirements. The participants provided their written informed consent to participate in this study.</p>
</sec>
<sec sec-type="author-contributions" id="sec29">
<title>Author contributions</title>
<p>YZ: Funding acquisition, Writing &#x2013; original draft, Formal analysis, Writing &#x2013; review &#x0026; editing, Methodology. QW: Writing &#x2013; original draft, Formal analysis. QT: Writing &#x2013; review &#x0026; editing, Methodology, Software. BT: Methodology, Writing &#x2013; review &#x0026; editing, Software. PS: Writing &#x2013; review &#x0026; editing, Resources. YX: Writing &#x2013; review &#x0026; editing, Resources. DW: Supervision, Writing &#x2013; review &#x0026; editing, Conceptualization.</p>
</sec>
<sec sec-type="funding-information" id="sec30">
<title>Funding</title>
<p>The author(s) declare that financial support was received for the research and/or publication of this article. This work was supported by the Beijing Natural Science Foundation (no. 4244090), the R&#x0026;D Program of Beijing Municipal Education Commission (no. KM202411232026), and the Research Program of Beijing Information Science and Technology University (no. 2023XJJ05). Additional support was provided by the Beijing Natural Science Foundation (no. L233035 and 3244039) and the Ministry of Civil Affairs Rehabilitation Field Key Laboratory and Engineering Technology Research Center Operating Expenses Project (no. 102118170090010009004).</p>
</sec>
<sec sec-type="COI-statement" id="sec31">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
<p>The author(s) declared that they were an editorial board member of Frontiers, at the time of submission. This had no impact on the peer review process and the final decision.</p>
</sec>
<sec sec-type="ai-statement" id="sec32">
<title>Generative AI statement</title>
<p>The authors declare that no Gen AI was used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec sec-type="disclaimer" id="sec33">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec sec-type="supplementary-material" id="sec34">
<title>Supplementary material</title>
<p>The Supplementary material for this article can be found online at: <ext-link xlink:href="https://www.frontiersin.org/articles/10.3389/fnins.2025.1654827/full#supplementary-material" ext-link-type="uri">https://www.frontiersin.org/articles/10.3389/fnins.2025.1654827/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Data_Sheet_1.docx" id="SM1" mimetype="application/vnd.openxmlformats-officedocument.wordprocessingml.document" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<ref-list>
<title>References</title>
<ref id="ref1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Aydore</surname><given-names>S.</given-names></name> <name><surname>Pantazis</surname><given-names>D.</given-names></name> <name><surname>Leahy</surname><given-names>R. M.</given-names></name></person-group> (<year>2013</year>). <article-title>A note on the phase locking value and its properties</article-title>. <source>NeuroImage</source> <volume>74</volume>, <fpage>231</fpage>&#x2013;<lpage>244</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2013.02.008</pub-id>, PMID: <pub-id pub-id-type="pmid">23435210</pub-id></citation></ref>
<ref id="ref2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Braboszcz</surname><given-names>C.</given-names></name> <name><surname>Delorme</surname><given-names>A.</given-names></name></person-group> (<year>2010</year>). <article-title>Lost in thoughts: neural markers of low alertness during mind wandering</article-title>. <source>NeuroImage</source> <volume>54</volume>, <fpage>3040</fpage>&#x2013;<lpage>3047</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2010.10.008</pub-id></citation></ref>
<ref id="ref3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname><given-names>Y. T.</given-names></name> <name><surname>Lee</surname><given-names>H. H.</given-names></name> <name><surname>Shih</surname><given-names>C. Y.</given-names></name> <name><surname>Chen</surname><given-names>Z. L.</given-names></name> <name><surname>Beh</surname><given-names>W. K.</given-names></name> <name><surname>Yeh</surname><given-names>S. L.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>An effective entropy-assisted mind-wandering detection system using EEG signals of MM-SART database</article-title>. <source>IEEE J. Biomed. Health Inform.</source> <volume>26</volume>, <fpage>3649</fpage>&#x2013;<lpage>3660</lpage>. doi: <pub-id pub-id-type="doi">10.1109/JBHI.2022.3187346</pub-id>, PMID: <pub-id pub-id-type="pmid">35767497</pub-id></citation></ref>
<ref id="ref4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cheyne</surname><given-names>J. A.</given-names></name> <name><surname>Solman</surname><given-names>G. J. F.</given-names></name> <name><surname>Carriere</surname><given-names>J. S. A.</given-names></name> <name><surname>Smilek</surname><given-names>D.</given-names></name></person-group> (<year>2009</year>). <article-title>Anatomy of an error: a bidirectional state model of task engagement/disengagement and attention-related errors</article-title>. <source>Cognition</source> <volume>111</volume>, <fpage>98</fpage>&#x2013;<lpage>113</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cognition.2008.12.009</pub-id>, PMID: <pub-id pub-id-type="pmid">19215913</pub-id></citation></ref>
<ref id="ref5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Christoff</surname><given-names>K.</given-names></name> <name><surname>Gordon</surname><given-names>A. M.</given-names></name> <name><surname>Smallwood</surname><given-names>J.</given-names></name> <name><surname>Smith</surname><given-names>R.</given-names></name> <name><surname>Schooler</surname><given-names>J. W.</given-names></name></person-group> (<year>2009</year>). <article-title>Experience sampling during fMRI reveals default network and executive system contributions to mind wandering</article-title>. <source>Proc. Natl. Acad. Sci.</source> <volume>106</volume>, <fpage>8719</fpage>&#x2013;<lpage>8724</lpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.0900234106</pub-id>, PMID: <pub-id pub-id-type="pmid">19433790</pub-id></citation></ref>
<ref id="ref6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Christoff</surname><given-names>K.</given-names></name> <name><surname>Irving</surname><given-names>Z. C.</given-names></name> <name><surname>Fox</surname><given-names>K. C.</given-names></name> <name><surname>Spreng</surname><given-names>R. N.</given-names></name> <name><surname>Andrews-Hanna</surname><given-names>J. R.</given-names></name></person-group> (<year>2016</year>). <article-title>Mind-wandering as spontaneous thought: a dynamic framework</article-title>. <source>Nat. Rev. Neurosci.</source> <volume>17</volume>, <fpage>718</fpage>&#x2013;<lpage>731</lpage>. doi: <pub-id pub-id-type="doi">10.1038/nrn.2016.113</pub-id>, PMID: <pub-id pub-id-type="pmid">27654862</pub-id></citation></ref>
<ref id="ref7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Coelli</surname><given-names>S.</given-names></name> <name><surname>Barbieri</surname><given-names>R.</given-names></name> <name><surname>Reni</surname><given-names>G.</given-names></name> <name><surname>Zucca</surname><given-names>C.</given-names></name> <name><surname>Bianchi</surname><given-names>A. M.</given-names></name></person-group> (<year>2018</year>). <article-title>EEG indices correlate with sustained attention performance in patients affected by diffuse axonal injury</article-title>. <source>Med. Biol. Eng. Comput.</source> <volume>56</volume>, <fpage>991</fpage>&#x2013;<lpage>1001</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s11517-017-1744-5</pub-id>, PMID: <pub-id pub-id-type="pmid">29124529</pub-id></citation></ref>
<ref id="ref8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Compton</surname><given-names>R. J.</given-names></name> <name><surname>Gearinger</surname><given-names>D.</given-names></name> <name><surname>Wild</surname><given-names>H.</given-names></name></person-group> (<year>2019</year>). <article-title>The wandering mind oscillates: EEG alpha power is enhanced during moments of mind-wandering</article-title>. <source>Cogn. Affect. Behav. Neurosci.</source> <volume>19</volume>, <fpage>1184</fpage>&#x2013;<lpage>1191</lpage>. doi: <pub-id pub-id-type="doi">10.3758/s13415-019-00745-9</pub-id>, PMID: <pub-id pub-id-type="pmid">31502206</pub-id></citation></ref>
<ref id="ref9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Delisle-Rodriguez</surname><given-names>D.</given-names></name> <name><surname>Oliveira</surname><given-names>H. L.</given-names> <suffix>Jr.</suffix></name> <name><surname>da Silva</surname><given-names>J. C.</given-names></name> <name><surname>de Souza</surname><given-names>M. L.</given-names></name> <name><surname>Bastos</surname><given-names>T.</given-names></name> <name><surname>Nakamura-Palacios</surname><given-names>E. M.</given-names></name> <etal/></person-group>. (<year>2023</year>). <article-title>Multi-channel EEG-based BCI using regression and classification methods for attention training by serious game</article-title>. <source>Biomed. Signal Process. Control.</source> <volume>85</volume>:<fpage>104937</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.bspc.2023.104937</pub-id></citation></ref>
<ref id="ref10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Delorme</surname><given-names>A.</given-names></name> <name><surname>Makeig</surname><given-names>S.</given-names></name></person-group> (<year>2004</year>). <article-title>EEGLAB: an open source toolbox for analysis of single-trial EEG dynamics including independent component analysis</article-title>. <source>J. Neurosci. Methods</source> <volume>134</volume>, <fpage>9</fpage>&#x2013;<lpage>21</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jneumeth.2003.10.009</pub-id>, PMID: <pub-id pub-id-type="pmid">15102499</pub-id></citation></ref>
<ref id="ref11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dong</surname><given-names>L.</given-names></name> <name><surname>Li</surname><given-names>F.</given-names></name> <name><surname>Liu</surname><given-names>Q.</given-names></name> <name><surname>Wen</surname><given-names>X.</given-names></name> <name><surname>Lai</surname><given-names>Y.</given-names></name> <name><surname>Xu</surname><given-names>P.</given-names></name> <etal/></person-group>. (<year>2017</year>). <article-title>MATLAB toolboxes for reference electrode standardization technique (REST) of scalp EEG</article-title>. <source>Front. Neurosci.</source> <volume>11</volume>:<fpage>601</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnins.2017.00601</pub-id>, PMID: <pub-id pub-id-type="pmid">29163006</pub-id></citation></ref>
<ref id="ref12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dong</surname><given-names>H. W.</given-names></name> <name><surname>Mills</surname><given-names>C.</given-names></name> <name><surname>Knight</surname><given-names>R. T.</given-names></name> <name><surname>Kam</surname><given-names>J. W. Y.</given-names></name></person-group> (<year>2021</year>). <article-title>Detection of mind wandering using EEG: within and across individuals</article-title>. <source>PLoS One</source> <volume>16</volume>:<fpage>e0251490</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0251490</pub-id>, PMID: <pub-id pub-id-type="pmid">33979407</pub-id></citation></ref>
<ref id="ref13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Eskandari Nasab</surname><given-names>M.</given-names></name> <name><surname>Raeisi</surname><given-names>Z.</given-names></name> <name><surname>Lashaki</surname><given-names>R. A.</given-names></name> <name><surname>Najafi</surname><given-names>H.</given-names></name></person-group> (<year>2024</year>). <article-title>A GRU&#x2013;CNN model for auditory attention detection using microstate and recurrence quantification analysis</article-title>. <source>Sci. Rep.</source> <volume>14</volume>:<fpage>8861</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-024-58886-y</pub-id>, PMID: <pub-id pub-id-type="pmid">38632246</pub-id></citation></ref>
<ref id="ref14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Esterman</surname><given-names>M.</given-names></name> <name><surname>Noonan</surname><given-names>S. K.</given-names></name> <name><surname>Rosenberg</surname><given-names>M.</given-names></name> <name><surname>DeGutis</surname><given-names>J.</given-names></name></person-group> (<year>2013</year>). <article-title>In the zone or zoning out? Tracking behavioral and neural fluctuations during sustained attention</article-title>. <source>Cereb. Cortex</source> <volume>23</volume>, <fpage>2712</fpage>&#x2013;<lpage>2723</lpage>. doi: <pub-id pub-id-type="doi">10.1093/cercor/bhs261</pub-id>, PMID: <pub-id pub-id-type="pmid">22941724</pub-id></citation></ref>
<ref id="ref15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fortenbaugh</surname><given-names>F. C.</given-names></name> <name><surname>Rothlein</surname><given-names>D.</given-names></name> <name><surname>McGlinchey</surname><given-names>R.</given-names></name> <name><surname>DeGutis</surname><given-names>J.</given-names></name> <name><surname>Esterman</surname><given-names>M.</given-names></name></person-group> (<year>2018</year>). <article-title>Tracking behavioral and neural fluctuations during sustained attention: a robust replication and extension</article-title>. <source>NeuroImage</source> <volume>171</volume>, <fpage>148</fpage>&#x2013;<lpage>164</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2018.01.002</pub-id>, PMID: <pub-id pub-id-type="pmid">29307606</pub-id></citation></ref>
<ref id="ref16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fries</surname><given-names>P.</given-names></name></person-group> (<year>2005</year>). <article-title>A mechanism for cognitive dynamics: neuronal communication through neuronal coherence</article-title>. <source>Trends Cogn. Sci.</source> <volume>9</volume>, <fpage>474</fpage>&#x2013;<lpage>480</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2005.08.011</pub-id>, PMID: <pub-id pub-id-type="pmid">16150631</pub-id></citation></ref>
<ref id="ref17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Groot</surname><given-names>J. M.</given-names></name> <name><surname>Boayue</surname><given-names>N. M.</given-names></name> <name><surname>Csifcs&#x00E1;k</surname><given-names>G.</given-names></name> <name><surname>Boekel</surname><given-names>W.</given-names></name> <name><surname>Huster</surname><given-names>R.</given-names></name> <name><surname>Forstmann</surname><given-names>B. U.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Probing the neural signature of mind wandering with simultaneous fMRI-EEG and pupillometry</article-title>. <source>NeuroImage</source> <volume>224</volume>:<fpage>117412</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2020.117412</pub-id>, PMID: <pub-id pub-id-type="pmid">33011417</pub-id></citation></ref>
<ref id="ref18"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Grunwald</surname><given-names>M.</given-names></name></person-group> (<year>2008</year>). <source>Human haptic perception: Basic and applications</source>. <publisher-loc>Basel, Switzerland</publisher-loc>: <publisher-name>Birkhauser Verlag</publisher-name>.</citation></ref>
<ref id="ref19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Haufe</surname><given-names>S.</given-names></name> <name><surname>Nikulin</surname><given-names>V. V.</given-names></name> <name><surname>M&#x00FC;ller</surname><given-names>K. R.</given-names></name> <name><surname>Nolte</surname><given-names>G.</given-names></name></person-group> (<year>2012</year>). <article-title>A critical assessment of connectivity measures for EEG data: a simulation study</article-title>. <source>NeuroImage</source> <volume>60</volume>, <fpage>476</fpage>&#x2013;<lpage>493</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2011.12.010</pub-id></citation></ref>
<ref id="ref20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hosseini</surname><given-names>S.</given-names></name> <name><surname>Guo</surname><given-names>X.</given-names></name></person-group> (<year>2019</year>). <article-title>Deep convolutional neural network for automated detection of mind wandering using EEG signals</article-title>. <source>ACM Int. Conf. Bioinformatics Comput. Biol. Health Inform.</source>, <fpage>314</fpage>&#x2013;<lpage>319</lpage>. doi: <pub-id pub-id-type="doi">10.1145/3307339.3342176</pub-id></citation></ref>
<ref id="ref21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jang</surname><given-names>J. H.</given-names></name> <name><surname>Jung</surname><given-names>W. H.</given-names></name> <name><surname>Kang</surname><given-names>D.</given-names></name> <name><surname>Byun</surname><given-names>M. S.</given-names></name> <name><surname>Kwon</surname><given-names>S. J.</given-names></name> <name><surname>Choi</surname><given-names>C.</given-names></name> <etal/></person-group>. (<year>2011</year>). <article-title>Increased default mode network connectivity associated with meditation</article-title>. <source>Neurosci. Lett.</source> <volume>487</volume>, <fpage>358</fpage>&#x2013;<lpage>362</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neulet.2010.10.056</pub-id>, PMID: <pub-id pub-id-type="pmid">21034792</pub-id></citation></ref>
<ref id="ref22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jin</surname><given-names>C. Y.</given-names></name> <name><surname>Borst</surname><given-names>J. P.</given-names></name> <name><surname>van Vugt</surname><given-names>M. K.</given-names></name></person-group> (<year>2019</year>). <article-title>Predicting task-general mind-wandering with EEG</article-title>. <source>Cogn. Affect. Behav. Neurosci.</source> <volume>19</volume>, <fpage>1059</fpage>&#x2013;<lpage>1073</lpage>. doi: <pub-id pub-id-type="doi">10.3758/s13415-019-00707-1</pub-id>, PMID: <pub-id pub-id-type="pmid">30850931</pub-id></citation></ref>
<ref id="ref23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jin</surname><given-names>C. Y.</given-names></name> <name><surname>Borst</surname><given-names>J. P.</given-names></name> <name><surname>van Vugt</surname><given-names>M. K.</given-names></name></person-group> (<year>2020</year>). <article-title>Distinguishing vigilance decrement and low task demands from mind-wandering: a machine learning analysis of EEG</article-title>. <source>Eur. J. Neurosci.</source> <volume>52</volume>, <fpage>4147</fpage>&#x2013;<lpage>4164</lpage>. doi: <pub-id pub-id-type="doi">10.1111/ejn.14863</pub-id>, PMID: <pub-id pub-id-type="pmid">32538509</pub-id></citation></ref>
<ref id="ref24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>John</surname><given-names>T. N.</given-names></name> <name><surname>Puthankattil</surname><given-names>S. D.</given-names></name> <name><surname>Menon</surname><given-names>R.</given-names></name></person-group> (<year>2018</year>). <article-title>Analysis of long range dependence in the EEG signals of Alzheimer patients</article-title>. <source>Cogn. Neurodyn.</source> <volume>12</volume>, <fpage>183</fpage>&#x2013;<lpage>199</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s11571-017-9467-8</pub-id>, PMID: <pub-id pub-id-type="pmid">29564027</pub-id></citation></ref>
<ref id="ref25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jothiral</surname><given-names>S. N.</given-names></name> <name><surname>Mills</surname><given-names>C.</given-names></name> <name><surname>Irving</surname><given-names>Z. C.</given-names></name> <name><surname>Kam</surname><given-names>J. W. Y.</given-names></name></person-group> (<year>2025</year>). <article-title>Detection of freely moving thoughts using SVM and EEG signals</article-title>. <source>J. Neural Eng.</source> <volume>22</volume>:<fpage>026021</fpage>. doi: <pub-id pub-id-type="doi">10.1088/1741-2552/adbd77</pub-id></citation></ref>
<ref id="ref26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kam</surname><given-names>J. W. Y.</given-names></name> <name><surname>Dao</surname><given-names>E.</given-names></name> <name><surname>Farley</surname><given-names>J.</given-names></name> <name><surname>Fitzpatrick</surname><given-names>K.</given-names></name> <name><surname>Smallwood</surname><given-names>J.</given-names></name> <name><surname>Schooler</surname><given-names>J. W.</given-names></name> <etal/></person-group>. (<year>2011</year>). <article-title>Slow fluctuations in attentional control of sensory cortex</article-title>. <source>J. Cogn. Neurosci.</source> <volume>23</volume>, <fpage>460</fpage>&#x2013;<lpage>470</lpage>. doi: <pub-id pub-id-type="doi">10.1162/jocn.2010.21443</pub-id>, PMID: <pub-id pub-id-type="pmid">20146593</pub-id></citation></ref>
<ref id="ref27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kam</surname><given-names>J. W. Y.</given-names></name> <name><surname>Rahnuma</surname><given-names>T.</given-names></name> <name><surname>Park</surname><given-names>Y. E.</given-names></name> <name><surname>Hart</surname><given-names>C. M.</given-names></name></person-group> (<year>2022</year>). <article-title>Electrophysiological markers of mind wandering: a systematic review</article-title>. <source>NeuroImage</source> <volume>258</volume>:<fpage>119372</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2022.119372</pub-id>, PMID: <pub-id pub-id-type="pmid">35700946</pub-id></citation></ref>
<ref id="ref28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Killingsworth</surname><given-names>M. A.</given-names></name> <name><surname>Gilbert</surname><given-names>D. T.</given-names></name></person-group> (<year>2010</year>). <article-title>A wandering mind is an unhappy mind</article-title>. <source>Science</source> <volume>330</volume>:<fpage>932</fpage>. doi: <pub-id pub-id-type="doi">10.1126/science.1192439</pub-id>, PMID: <pub-id pub-id-type="pmid">21071660</pub-id></citation></ref>
<ref id="ref29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kucyi</surname><given-names>A.</given-names></name> <name><surname>Esterman</surname><given-names>M.</given-names></name> <name><surname>Riley</surname><given-names>C. S.</given-names></name> <name><surname>Valera</surname><given-names>E. M.</given-names></name></person-group> (<year>2016</year>). <article-title>Spontaneous default network activity reflects behavioral variability independent of mind-wandering</article-title>. <source>Proc. Natl. Acad. Sci. USA</source> <volume>113</volume>, <fpage>13899</fpage>&#x2013;<lpage>13904</lpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.1611743113</pub-id>, PMID: <pub-id pub-id-type="pmid">27856733</pub-id></citation></ref>
<ref id="ref30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lederman</surname><given-names>S. J.</given-names></name> <name><surname>Klatzky</surname><given-names>R. L.</given-names></name></person-group> (<year>2009</year>). <article-title>Haptic perception: a tutorial</article-title>. <source>Atten. Percept. Psychophys.</source> <volume>71</volume>, <fpage>1439</fpage>&#x2013;<lpage>1459</lpage>. doi: <pub-id pub-id-type="doi">10.3758/APP.71.7.1439</pub-id>, PMID: <pub-id pub-id-type="pmid">19801605</pub-id></citation></ref>
<ref id="ref31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liang</surname><given-names>Z.</given-names></name> <name><surname>Wang</surname><given-names>X.</given-names></name> <name><surname>Zhao</surname><given-names>J.</given-names></name> <name><surname>Li</surname><given-names>X.</given-names></name></person-group> (<year>2022</year>). <article-title>Comparative study of attention-related features on attention monitoring systems with a single EEG channel</article-title>. <source>J. Neurosci. Methods</source> <volume>382</volume>:<fpage>109711</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jneumeth.2022.109711</pub-id>, PMID: <pub-id pub-id-type="pmid">36126733</pub-id></citation></ref>
<ref id="ref32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname><given-names>Y.</given-names></name> <name><surname>Meng</surname><given-names>J.</given-names></name> <name><surname>Yao</surname><given-names>M.</given-names></name> <name><surname>Ye</surname><given-names>Q.</given-names></name> <name><surname>Fan</surname><given-names>B.</given-names></name> <name><surname>Peng</surname><given-names>W.</given-names></name></person-group> (<year>2019</year>). <article-title>Hearing other&#x2019;s pain is associated with sensitivity to physical pain: an ERP study</article-title>. <source>Biol. Psychol.</source> <volume>145</volume>, <fpage>150</fpage>&#x2013;<lpage>158</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.biopsycho.2019.03.011</pub-id>, PMID: <pub-id pub-id-type="pmid">30914209</pub-id></citation></ref>
<ref id="ref33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname><given-names>Y.</given-names></name> <name><surname>Zhao</surname><given-names>J.</given-names></name> <name><surname>Zhou</surname><given-names>X.</given-names></name> <name><surname>Liu</surname><given-names>X.</given-names></name> <name><surname>Chen</surname><given-names>H.</given-names></name> <name><surname>Yuan</surname><given-names>H.</given-names></name></person-group> (<year>2021</year>). <article-title>The neural markers of self-caught and probe-caught mind wandering: an ERP study</article-title>. <source>Brain Sci.</source> <volume>11</volume>:<fpage>1329</fpage>. doi: <pub-id pub-id-type="doi">10.3390/brainsci11101329</pub-id>, PMID: <pub-id pub-id-type="pmid">34679394</pub-id></citation></ref>
<ref id="ref34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Long</surname><given-names>Z.</given-names></name> <name><surname>Northoff</surname><given-names>G.</given-names></name> <name><surname>Fu</surname><given-names>X.</given-names></name></person-group> (<year>2025</year>). <article-title>Task difficulty modulates the effect of mind wandering on phase coherence during sensorimotor processing</article-title>. <source>Proc. Natl. Acad. Sci. USA</source> <volume>122</volume>:<fpage>e2416387122</fpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.2416387122</pub-id></citation></ref>
<ref id="ref35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Luna</surname><given-names>F. G.</given-names></name> <name><surname>Aguirre</surname><given-names>M. J.</given-names></name> <name><surname>Mart&#x00ED;n-Ar&#x00E9;valo</surname><given-names>E.</given-names></name> <name><surname>Ib&#x00E1;&#x00F1;ez</surname><given-names>A.</given-names></name> <name><surname>Lupi&#x00E1;&#x00F1;ez</surname><given-names>J.</given-names></name> <name><surname>Barttfeld</surname><given-names>P.</given-names></name></person-group> (<year>2023</year>). <article-title>Different oscillatory rhythms anticipate failures in executive and arousal vigilance</article-title>. <source>Front. Cognit.</source> <volume>2</volume>:<fpage>8442</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fcogn.2023.1128442</pub-id></citation></ref>
<ref id="ref36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Maris</surname><given-names>E.</given-names></name> <name><surname>Oostenveld</surname><given-names>R.</given-names></name></person-group> (<year>2007</year>). <article-title>Nonparametric statistical testing of EEG- and MEG-data</article-title>. <source>J. Neurosci. Methods</source> <volume>164</volume>, <fpage>177</fpage>&#x2013;<lpage>190</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jneumeth.2007.03.024</pub-id>, PMID: <pub-id pub-id-type="pmid">17517438</pub-id></citation></ref>
<ref id="ref37"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Melinscak</surname><given-names>F.</given-names></name> <name><surname>Montesano</surname><given-names>L.</given-names></name> <name><surname>Minguez</surname><given-names>J.</given-names></name></person-group> (<year>2016</year>). <article-title>Asynchronous detection of kinesthetic attention during mobilization of lower limbs using EEG measurements</article-title>. <source>J. Neural Eng.</source> <volume>13</volume>:<fpage>016018</fpage>. doi: <pub-id pub-id-type="doi">10.1088/1741-2560/13/1/016018</pub-id>, PMID: <pub-id pub-id-type="pmid">26735705</pub-id></citation></ref>
<ref id="ref38"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mittner</surname><given-names>M.</given-names></name> <name><surname>Boekel</surname><given-names>W.</given-names></name> <name><surname>Tucker</surname><given-names>A. M.</given-names></name> <name><surname>Turner</surname><given-names>B. M.</given-names></name> <name><surname>Heathcote</surname><given-names>A.</given-names></name> <name><surname>Forstmann</surname><given-names>B. U.</given-names></name></person-group> (<year>2014</year>). <article-title>When the brain takes a break: a model-based analysis of mind wandering</article-title>. <source>J. Neurosci.</source> <volume>34</volume>, <fpage>16286</fpage>&#x2013;<lpage>16295</lpage>. doi: <pub-id pub-id-type="doi">10.1523/JNEUROSCI.2062-14.2014</pub-id>, PMID: <pub-id pub-id-type="pmid">25471568</pub-id></citation></ref>
<ref id="ref39"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Moon</surname><given-names>S. E.</given-names></name> <name><surname>Chen</surname><given-names>C. J.</given-names></name> <name><surname>Hsieh</surname><given-names>C. J.</given-names></name> <name><surname>Wang</surname><given-names>J. L.</given-names></name> <name><surname>Lee</surname><given-names>J. S.</given-names></name></person-group> (<year>2020</year>). <article-title>Emotional EEG classification using connectivity features and convolutional neural networks</article-title>. <source>Neural Netw.</source> <volume>132</volume>, <fpage>96</fpage>&#x2013;<lpage>107</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neunet.2020.08.009</pub-id>, PMID: <pub-id pub-id-type="pmid">32861918</pub-id></citation></ref>
<ref id="ref40"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nakahara</surname><given-names>H.</given-names></name> <name><surname>Sakai</surname><given-names>K.</given-names></name> <name><surname>Nakamura</surname><given-names>K.</given-names></name> <name><surname>Hikosaka</surname><given-names>O.</given-names></name></person-group> (<year>2002</year>). <article-title>Central mechanisms of motor skill learning</article-title>. <source>Curr. Opin. Neurobiol.</source> <volume>12</volume>, <fpage>217</fpage>&#x2013;<lpage>222</lpage>. doi: <pub-id pub-id-type="doi">10.1016/s0959-4388(02)00307-0</pub-id>, PMID: <pub-id pub-id-type="pmid">12015240</pub-id></citation></ref>
<ref id="ref41"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Northoff</surname><given-names>G.</given-names></name> <name><surname>Vatansever</surname><given-names>D.</given-names></name> <name><surname>Scalabrini</surname><given-names>A.</given-names></name> <name><surname>Stamatakis</surname><given-names>E. A.</given-names></name></person-group> (<year>2022</year>). <article-title>Ongoing brain activity and its role in cognition: dual versus baseline models</article-title>. <source>Neuroscientist</source> <volume>29</volume>, <fpage>393</fpage>&#x2013;<lpage>420</lpage>. doi: <pub-id pub-id-type="doi">10.1177/10738584221081752</pub-id>, PMID: <pub-id pub-id-type="pmid">35611670</pub-id></citation></ref>
<ref id="ref42"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Oostenveld</surname><given-names>R.</given-names></name> <name><surname>Fries</surname><given-names>P.</given-names></name> <name><surname>Maris</surname><given-names>E.</given-names></name> <name><surname>Schoffelen</surname><given-names>J. M.</given-names></name></person-group> (<year>2011</year>). <article-title>FieldTrip: open source software for advanced analysis of MEG, EEG, and invasive electrophysiological data</article-title>. <source>Comput. Intell. Neurosci.</source> <volume>2011</volume>:<fpage>156869</fpage>. doi: <pub-id pub-id-type="doi">10.1155/2011/156869</pub-id>, PMID: <pub-id pub-id-type="pmid">21253357</pub-id></citation></ref>
<ref id="ref43"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Palva</surname><given-names>J. M.</given-names></name> <name><surname>Palva</surname><given-names>S.</given-names></name></person-group> (<year>2017</year>). <article-title>Functional integration across oscillation frequencies by cross-frequency phase synchronization</article-title>. <source>Eur. J. Neurosci.</source> <volume>48</volume>, <fpage>2399</fpage>&#x2013;<lpage>2406</lpage>. doi: <pub-id pub-id-type="doi">10.1111/ejn.13767</pub-id>, PMID: <pub-id pub-id-type="pmid">29094462</pub-id></citation></ref>
<ref id="ref44"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Peng</surname><given-names>C.</given-names></name> <name><surname>Peng</surname><given-names>W.</given-names></name> <name><surname>Feng</surname><given-names>W.</given-names></name> <name><surname>Zhang</surname><given-names>Y.</given-names></name> <name><surname>Xiao</surname><given-names>J.</given-names></name> <name><surname>Wang</surname><given-names>D.</given-names></name></person-group> (<year>2021</year>). <article-title>EEG correlates of sustained attention variability during discrete multi-finger force control tasks</article-title>. <source>IEEE Trans. Haptics</source> <volume>14</volume>, <fpage>526</fpage>&#x2013;<lpage>537</lpage>. doi: <pub-id pub-id-type="doi">10.1109/TOH.2021.3055842</pub-id>, PMID: <pub-id pub-id-type="pmid">33523817</pub-id></citation></ref>
<ref id="ref45"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pernet</surname><given-names>C. R.</given-names></name> <name><surname>Latinus</surname><given-names>M.</given-names></name> <name><surname>Nichols</surname><given-names>T. E.</given-names></name> <name><surname>Rousselet</surname><given-names>G. A.</given-names></name></person-group> (<year>2015</year>). <article-title>Cluster-based computational methods for mass univariate analyses of event-related brain potentials/fields: a simulation study</article-title>. <source>J. Neurosci. Methods</source> <volume>250</volume>, <fpage>85</fpage>&#x2013;<lpage>93</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jneumeth.2014.08.003</pub-id>, PMID: <pub-id pub-id-type="pmid">25128255</pub-id></citation></ref>
<ref id="ref46"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rahman</surname><given-names>T. T.</given-names></name> <name><surname>Polskaia</surname><given-names>N.</given-names></name> <name><surname>St-Amant</surname><given-names>G.</given-names></name> <name><surname>Salzman</surname><given-names>T.</given-names></name> <name><surname>Vallejo</surname><given-names>T. D.</given-names></name> <name><surname>Lajoie</surname><given-names>Y.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>An fNIRS investigation of discrete and continuous cognitive demands during dual-task walking in young adults</article-title>. <source>Front. Hum. Neurosci.</source> <volume>16</volume>:<fpage>711054</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnhum.2021.711054</pub-id></citation></ref>
<ref id="ref47"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Reteig</surname><given-names>C. L.</given-names></name> <name><surname>van den Brink</surname><given-names>L. R.</given-names></name> <name><surname>Prinssen</surname><given-names>S.</given-names></name> <name><surname>Cohen</surname><given-names>X. M.</given-names></name> <name><surname>Slagter</surname><given-names>A. H.</given-names></name></person-group> (<year>2019</year>). <article-title>Sustaining attention for a prolonged period of time increases temporal variability in cortical responses</article-title>. <source>Cortex</source> <volume>117</volume>, <fpage>16</fpage>&#x2013;<lpage>32</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cortex.2019.02.016</pub-id></citation></ref>
<ref id="ref48"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rodriguez-Larios</surname><given-names>J.</given-names></name> <name><surname>Alaerts</surname><given-names>K.</given-names></name></person-group> (<year>2019</year>). <article-title>Tracking transient changes in the neural frequency architecture: harmonic relationships between theta and alpha peaks facilitate cognitive performance</article-title>. <source>J. Neurosci.</source> <volume>39</volume>, <fpage>6291</fpage>&#x2013;<lpage>6298</lpage>. doi: <pub-id pub-id-type="doi">10.1523/JNEUROSCI.2919-18.2019</pub-id>, PMID: <pub-id pub-id-type="pmid">31175211</pub-id></citation></ref>
<ref id="ref49"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rodriguez-Larios</surname><given-names>J.</given-names></name> <name><surname>Alaerts</surname><given-names>K.</given-names></name></person-group> (<year>2021</year>). <article-title>EEG alpha-theta dynamics during mind wandering in the context of breath focus meditation: an experience sampling approach with novice meditation practitioners</article-title>. <source>Eur. J. Neurosci.</source> <volume>53</volume>, <fpage>1855</fpage>&#x2013;<lpage>1868</lpage>. doi: <pub-id pub-id-type="doi">10.1111/ejn.15073</pub-id>, PMID: <pub-id pub-id-type="pmid">33289167</pub-id></citation></ref>
<ref id="ref50"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rodriguez-Larios</surname><given-names>J.</given-names></name> <name><surname>Faber</surname><given-names>P.</given-names></name> <name><surname>Achermann</surname><given-names>P.</given-names></name> <name><surname>Tei</surname><given-names>S.</given-names></name> <name><surname>Alaerts</surname><given-names>K.</given-names></name></person-group> (<year>2020</year>). <article-title>From thoughtless awareness to effortful cognition: alpha &#x2013; theta cross-frequency dynamics in experienced meditators during meditation, rest and arithmetic</article-title>. <source>Sci. Rep.</source> <volume>10</volume>:<fpage>5419</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-020-62392-2</pub-id>, PMID: <pub-id pub-id-type="pmid">32214173</pub-id></citation></ref>
<ref id="ref51"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rosenberg</surname><given-names>J. R.</given-names></name> <name><surname>Amjad</surname><given-names>A. M.</given-names></name> <name><surname>Breeze</surname><given-names>P.</given-names></name> <name><surname>Brillinger</surname><given-names>D. R.</given-names></name> <name><surname>Halliday</surname><given-names>D. M.</given-names></name></person-group> (<year>1989</year>). <article-title>The fourier approach to the identification of functional coupling between neuronal spike trains</article-title>. <source>Prog. Biophys. Mol. Biol.</source> <volume>53</volume>, <fpage>1</fpage>&#x2013;<lpage>31</lpage>. doi: <pub-id pub-id-type="doi">10.1016/0079-6107(89)90004-7</pub-id>, PMID: <pub-id pub-id-type="pmid">2682781</pub-id></citation></ref>
<ref id="ref52"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schooler</surname><given-names>J. W.</given-names></name> <name><surname>Smallwood</surname><given-names>J.</given-names></name> <name><surname>Christoff</surname><given-names>K.</given-names></name> <name><surname>Handy</surname><given-names>T. C.</given-names></name> <name><surname>Reichle</surname><given-names>E. D.</given-names></name> <name><surname>Sayette</surname><given-names>M. A.</given-names></name></person-group> (<year>2011</year>). <article-title>Meta-awareness, perceptual decoupling and the wandering mind</article-title>. <source>Trends Cogn. Sci.</source> <volume>15</volume>, <fpage>319</fpage>&#x2013;<lpage>326</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2011.05.006</pub-id>, PMID: <pub-id pub-id-type="pmid">21684189</pub-id></citation></ref>
<ref id="ref53"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Seli</surname><given-names>P.</given-names></name> <name><surname>Kane</surname><given-names>M. J.</given-names></name> <name><surname>Smallwood</surname><given-names>J.</given-names></name> <name><surname>Schacter</surname><given-names>D. L.</given-names></name> <name><surname>Maillet</surname><given-names>D.</given-names></name> <name><surname>Schooler</surname><given-names>J. W.</given-names></name> <etal/></person-group>. (<year>2018</year>). <article-title>Mind-wandering as a natural kind: a family-resemblances view</article-title>. <source>Trends Cogn. Sci.</source> <volume>22</volume>, <fpage>479</fpage>&#x2013;<lpage>490</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2018.03.010</pub-id>, PMID: <pub-id pub-id-type="pmid">29776466</pub-id></citation></ref>
<ref id="ref54"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Smallwood</surname><given-names>J.</given-names></name></person-group> (<year>2013</year>). <article-title>Distinguishing how from why the mind wanders: a process-occurrence framework for self-generated mental activity</article-title>. <source>Psychol. Bull.</source> <volume>139</volume>, <fpage>519</fpage>&#x2013;<lpage>535</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0030010</pub-id>, PMID: <pub-id pub-id-type="pmid">23607430</pub-id></citation></ref>
<ref id="ref55"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Smallwood</surname><given-names>J.</given-names></name> <name><surname>Brown</surname><given-names>K. S.</given-names></name> <name><surname>Tipper</surname><given-names>C. M.</given-names></name> <name><surname>Giesbrecht</surname><given-names>B.</given-names></name> <name><surname>Franklin</surname><given-names>M. S.</given-names></name> <name><surname>Mrazek</surname><given-names>M. D.</given-names></name> <etal/></person-group>. (<year>2011</year>). <article-title>Pupillometric evidence for the decoupling of attention from perceptual input during offline thought</article-title>. <source>PLoS One</source> <volume>6</volume>:<fpage>e18298</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0018298</pub-id>, PMID: <pub-id pub-id-type="pmid">21464969</pub-id></citation></ref>
<ref id="ref56"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Smallwood</surname><given-names>J.</given-names></name> <name><surname>O&#x2019;Connor</surname><given-names>R. C.</given-names></name> <name><surname>Sudbery</surname><given-names>M. V.</given-names></name> <name><surname>Obonsawin</surname><given-names>M.</given-names></name></person-group> (<year>2007</year>). <article-title>Mind-wandering and dysphoria</article-title>. <source>Cogn. Emot.</source> <volume>21</volume>, <fpage>816</fpage>&#x2013;<lpage>842</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699930600911531</pub-id></citation></ref>
<ref id="ref57"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Smallwood</surname><given-names>J.</given-names></name> <name><surname>Schooler</surname><given-names>J. W.</given-names></name></person-group> (<year>2015</year>). <article-title>The science of mind wandering: empirically navigating the stream of consciousness</article-title>. <source>Annu. Rev. Psychol.</source> <volume>66</volume>, <fpage>487</fpage>&#x2013;<lpage>518</lpage>. doi: <pub-id pub-id-type="doi">10.1146/annurev-psych-010814-015331</pub-id>, PMID: <pub-id pub-id-type="pmid">25293689</pub-id></citation></ref>
<ref id="ref58"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stawarczyk</surname><given-names>D.</given-names></name> <name><surname>Majerus</surname><given-names>S.</given-names></name> <name><surname>Maj</surname><given-names>M.</given-names></name> <name><surname>Van der Linden</surname><given-names>M.</given-names></name> <name><surname>D&#x2019;Argembeau</surname><given-names>A.</given-names></name></person-group> (<year>2011</year>). <article-title>Mind-wandering: phenomenology and function as assessed with a novel experience sampling method</article-title>. <source>Acta Psychol.</source> <volume>136</volume>, <fpage>370</fpage>&#x2013;<lpage>381</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.actpsy.2011.01.002</pub-id></citation></ref>
<ref id="ref59"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tafreshi</surname><given-names>T. F.</given-names></name> <name><surname>Daliri</surname><given-names>M. R.</given-names></name> <name><surname>Ghodousi</surname><given-names>M.</given-names></name></person-group> (<year>2019</year>). <article-title>Functional and effective connectivity based features of EEG signals for object recognition</article-title>. <source>Cogn. Neurodyn.</source> <volume>13</volume>, <fpage>555</fpage>&#x2013;<lpage>566</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s11571-019-09556-7</pub-id>, PMID: <pub-id pub-id-type="pmid">31741692</pub-id></citation></ref>
<ref id="ref60"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tang</surname><given-names>S.</given-names></name> <name><surname>Li</surname><given-names>Z.</given-names></name></person-group> (<year>2024</year>). <article-title>EEG complexity measures for detecting mind wandering during video-based learning</article-title>. <source>Sci. Rep.</source> <volume>14</volume>:<fpage>8209</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-024-58889-9</pub-id>, PMID: <pub-id pub-id-type="pmid">38589498</pub-id></citation></ref>
<ref id="ref61"><citation citation-type="confproc"><person-group person-group-type="author"><name><surname>Tasika</surname><given-names>N. J.</given-names></name> <name><surname>Haque</surname><given-names>M. H.</given-names></name> <name><surname>Rimo</surname><given-names>M. B.</given-names></name> <name><surname>AlHaque</surname><given-names>M.</given-names></name> <name><surname>Alam</surname><given-names>S.</given-names></name> <name><surname>Tamanna</surname><given-names>T.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>A framework for mind wandering detection using EEG signals</article-title>. <conf-name>IEEE region 10 Symp</conf-name>. <fpage>1474</fpage>&#x2013;<lpage>1477</lpage>.</citation></ref>
<ref id="ref62"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wainstein</surname><given-names>G.</given-names></name> <name><surname>Rojasl&#x00ED;bano</surname><given-names>D.</given-names></name> <name><surname>Crossley</surname><given-names>N. A.</given-names></name> <name><surname>Carrasco</surname><given-names>X.</given-names></name> <name><surname>Aboitiz</surname><given-names>F.</given-names></name> <name><surname>Ossand&#x00F3;n</surname><given-names>T.</given-names></name></person-group> (<year>2017</year>). <article-title>Pupil size tracks attentional performance in attention deficit/hyperactivity disorder</article-title>. <source>Sci. Rep.</source> <volume>7</volume>:<fpage>8228</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-017-08246-w</pub-id></citation></ref>
<ref id="ref63"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname><given-names>D.</given-names></name> <name><surname>Zhang</surname><given-names>X.</given-names></name> <name><surname>Zhang</surname><given-names>Y.</given-names></name> <name><surname>Xiao</surname><given-names>J.</given-names></name></person-group> (<year>2013</year>). <article-title>Configuration-based optimization for six degree-of-freedom haptic rendering for fine manipulation</article-title>. <source>IEEE Trans. Haptics</source> <volume>6</volume>, <fpage>167</fpage>&#x2013;<lpage>180</lpage>. doi: <pub-id pub-id-type="doi">10.1109/ToH.2012.63</pub-id></citation></ref>
<ref id="ref64"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wiebe</surname><given-names>A.</given-names></name> <name><surname>Selaskowski</surname><given-names>B.</given-names></name> <name><surname>Paskin</surname><given-names>M.</given-names></name> <name><surname>Asch&#x00E9;</surname><given-names>L.</given-names></name> <name><surname>Pakos</surname><given-names>J.</given-names></name> <name><surname>Aslan</surname><given-names>B.</given-names></name> <etal/></person-group>. (<year>2024</year>). <article-title>Virtual reality-assisted prediction of adult ADHD based on eye tracking, EEG, actigraphy and behavioral indices: a machine learning analysis of independent training and test samples</article-title>. <source>Transl. Psychiatry</source> <volume>14</volume>:<fpage>508</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41398-024-03217-y</pub-id>, PMID: <pub-id pub-id-type="pmid">39741130</pub-id></citation></ref>
<ref id="ref65"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Winkler</surname><given-names>I.</given-names></name> <name><surname>Brandl</surname><given-names>S.</given-names></name> <name><surname>Horn</surname><given-names>F.</given-names></name> <name><surname>Waldburger</surname><given-names>E.</given-names></name> <name><surname>Allefeld</surname><given-names>C.</given-names></name> <name><surname>Tangermann</surname><given-names>M.</given-names></name></person-group> (<year>2014</year>). <article-title>Robust artifactual independent component classification for BCI practitioners</article-title>. <source>J. Neural Eng.</source> <volume>11</volume>:<fpage>035013</fpage>. doi: <pub-id pub-id-type="doi">10.1088/1741-2560/11/3/035013</pub-id>, PMID: <pub-id pub-id-type="pmid">24836294</pub-id></citation></ref>
<ref id="ref66"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Winkler</surname><given-names>I.</given-names></name> <name><surname>Haufe</surname><given-names>S.</given-names></name> <name><surname>Tangermann</surname><given-names>M.</given-names></name></person-group> (<year>2011</year>). <article-title>Automatic classification of artifactual ICA-components for artifact removal in EEG signals</article-title>. <source>Behav. Brain Funct.</source> <volume>7</volume>:<fpage>30</fpage>. doi: <pub-id pub-id-type="doi">10.1186/1744-9081-7-30</pub-id>, PMID: <pub-id pub-id-type="pmid">21810266</pub-id></citation></ref>
<ref id="ref67"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zalesky</surname><given-names>A.</given-names></name> <name><surname>Fornito</surname><given-names>A.</given-names></name> <name><surname>Bullmore</surname><given-names>E. T.</given-names></name></person-group> (<year>2010</year>). <article-title>Network-based statistic: identifying differences in brain networks</article-title>. <source>NeuroImage</source> <volume>53</volume>, <fpage>1197</fpage>&#x2013;<lpage>1207</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2010.06.041</pub-id>, PMID: <pub-id pub-id-type="pmid">20600983</pub-id></citation></ref>
<ref id="ref68"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname><given-names>Z.</given-names></name> <name><surname>Kang</surname><given-names>S.</given-names></name> <name><surname>Yu</surname><given-names>J.</given-names></name> <name><surname>Li</surname><given-names>H.</given-names></name> <name><surname>Yin</surname><given-names>G.</given-names></name> <name><surname>Zhang</surname><given-names>H.</given-names></name> <etal/></person-group>. (<year>2023</year>). <article-title>Quantitative identification of ADHD tendency in children with immersive fingertip force control tasks</article-title>. <source>IEEE Trans. Neural Syst. Rehabil. Eng.</source> <volume>31</volume>, <fpage>4561</fpage>&#x2013;<lpage>4569</lpage>. doi: <pub-id pub-id-type="doi">10.1109/TNSRE.2023.3332467</pub-id></citation></ref>
<ref id="ref69"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname><given-names>D.</given-names></name> <name><surname>Lin</surname><given-names>Y.</given-names></name> <name><surname>Jing</surname><given-names>Y.</given-names></name> <name><surname>Feng</surname><given-names>C.</given-names></name> <name><surname>Gu</surname><given-names>R.</given-names></name></person-group> (<year>2019</year>). <article-title>The dynamics of belief updating in human cooperation: findings from inter-brain ERP hyper scanning</article-title>. <source>NeuroImage</source> <volume>198</volume>, <fpage>1</fpage>&#x2013;<lpage>12</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2019.05.029</pub-id></citation></ref>
<ref id="ref70"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhao</surname><given-names>Y.</given-names></name> <name><surname>Yuan</surname><given-names>Y.</given-names></name></person-group> (<year>2025</year>). <article-title>The role of quantitative EEG biomarkers in Alzheimer&#x2019;s disease and mild cognitive impairment: applications and insights</article-title>. <source>Front. Aging Neurosci.</source> <volume>17</volume>:<fpage>1522552</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnagi.2025.1522552</pub-id>, PMID: <pub-id pub-id-type="pmid">40336944</pub-id></citation></ref>
<ref id="ref71"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zheng</surname><given-names>Y.</given-names></name> <name><surname>Wang</surname><given-names>D.</given-names></name> <name><surname>Zhang</surname><given-names>Y.</given-names></name> <name><surname>Xu</surname><given-names>W.</given-names></name></person-group> (<year>2019</year>). <article-title>Detecting mind wandering: an objective method via simultaneous control of respiration and fingertip pressure</article-title>. <source>Front. Psychol.</source> <volume>10</volume>:<fpage>216</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2019.00216</pub-id>, PMID: <pub-id pub-id-type="pmid">30804854</pub-id></citation></ref>
</ref-list>
</back>
</article>