<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Hum. Neurosci.</journal-id>
<journal-title>Frontiers in Human Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Hum. Neurosci.</abbrev-journal-title>
<issn pub-type="epub">1662-5161</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnhum.2024.1495592</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Human Neuroscience</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Baseline dependent differences in the perception of changes in visuomotor delay</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name><surname>Vigh</surname> <given-names>Gesche</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2841898/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Limanowski</surname> <given-names>Jakub</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/74619/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Faculty of Psychology, Technical University of Dresden</institution>, <addr-line>Dresden</addr-line>, <country>Germany</country></aff>
<aff id="aff2"><sup>2</sup><institution>Center for Tactile Internet With Human-in-the-Loop, Technical University of Dresden</institution>, <addr-line>Dresden</addr-line>, <country>Germany</country></aff>
<aff id="aff3"><sup>3</sup><institution>Institute of Psychology, University of Greifswald</institution>, <addr-line>Greifswald</addr-line>, <country>Germany</country></aff>
<author-notes>
<fn fn-type="edited-by" id="fn0001">
<p>Edited by: Hai-Ning Liang, The Hong Kong University of Science and Technology (Guangzhou), China</p>
</fn>
<fn fn-type="edited-by" id="fn0002">
<p>Reviewed by: Olga Mikhailovna Bazanova, Federal Research Center of Fundamental and Translational Medicine, Russia</p>
<p>Tejas Savalia, Emory University, United States</p>
</fn>
<corresp id="c001">&#x002A;Correspondence: Gesche Vigh, <email>g.vigh@gmx.de</email></corresp>
</author-notes>
<pub-date pub-type="epub">
<day>06</day>
<month>01</month>
<year>2025</year>
</pub-date>
<pub-date pub-type="collection">
<year>2024</year>
</pub-date>
<volume>18</volume>
<elocation-id>1495592</elocation-id>
<history>
<date date-type="received">
<day>12</day>
<month>09</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>16</day>
<month>12</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2025 Vigh and Limanowski.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Vigh and Limanowski</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<sec>
<title>Introduction</title>
<p>The detection of, and adaptation to delayed visual movement feedback has been extensively studied. One important open question is whether the Weber-Fechner Laws hold in the domain of visuomotor delay; i.e., whether the perception of changes in visuomotor delay depends on the amount of delay already present during movement.</p>
</sec>
<sec>
<title>Methods</title>
<p>To address this, we developed a virtual reality based, continuous hand movement task, during which participants had to detect changes in visuomotor mapping (delay): Participants (<italic>N</italic> =&#x202F;40) performed continuous, auditory-paced grasping movements, which were measured with a data glove and transmitted to a virtual hand model. The movements of the virtual hand were delayed between 0 and 700 ms with the delay changing repeatedly in a roving oddball design. Participants had to indicate any perceived delay changes by key press. This design allowed us to investigate detection accuracy and speed related to the magnitude of the delay change, and to the &#x201C;baseline&#x201D; delay present during movement, respectively.</p>
</sec>
<sec>
<title>Results</title>
<p>As expected, larger delay changes were detected more accurately than smaller ones. Surprisingly, delay changes were detected more accurately and faster when participants moved under large &#x003E; small delays.</p>
</sec>
<sec>
<title>Discussion</title>
<p>These results suggest that visual movement feedback delay indeed affects the detection of changes in visuomotor delay, but not as predicted by the Weber-Fechner Laws. Instead, bodily action under small delays may have entailed a larger tolerance for delay changes due to embodiment-related intersensory conflict attenuation; whereas better change detection at large delays may have resulted from their (visual) saliency due to a strong violation of visuomotor predictions.</p>
</sec>
</abstract>
<kwd-group>
<kwd>action</kwd>
<kwd>delay</kwd>
<kwd>virtual reality</kwd>
<kwd>visuomotor mapping</kwd>
<kwd>Weber-Fechner-law</kwd>
</kwd-group>
<contract-num rid="cn1">EXC 2050/1</contract-num>
<contract-num rid="cn2">390696704</contract-num>
<contract-num rid="cn3">AZ 97-932</contract-num>
<contract-sponsor id="cn1">German Research Foundation (DFG, Deutsche Forschungsgemeinschaft)</contract-sponsor>
<contract-sponsor id="cn2">Cluster of Excellence &#x201C;Centre for Tactile Internet with Human-in-the-Loop&#x201D;</contract-sponsor>
<contract-sponsor id="cn3">VolkswagenStiftung<named-content content-type="fundref-id">10.13039/501100001663</named-content></contract-sponsor>
<counts>
<fig-count count="4"/>
<table-count count="2"/>
<equation-count count="1"/>
<ref-count count="55"/>
<page-count count="9"/>
<word-count count="7002"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Cognitive Neuroscience</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="sec1">
<label>1</label>
<title>Introduction</title>
<p>For much of our usual sensory input, like sound, light and weight, perceptibility of a change is proportional to the stimuli perceived before the change. Thus, the larger an initial stimulus is, the larger the change needs to be for the resulting stimulus to be perceived as different. This relationship is described by the Weber-Fechner Laws (<xref ref-type="bibr" rid="ref18">Fechner et al., 1966</xref>), which have been confirmed across different sensory modalities (<xref ref-type="bibr" rid="ref4">Baird, 1997</xref>; <xref ref-type="bibr" rid="ref21">Gescheider, 1997</xref>). Recent empirical evidence suggests that the Weber-Fechner Laws may also apply to more abstract domains like time and reward perception (<xref ref-type="bibr" rid="ref9">Brannon et al., 2008</xref>; <xref ref-type="bibr" rid="ref38">Namboodiri et al., 2014</xref>; <xref ref-type="bibr" rid="ref40">Nieder and Miller, 2003</xref>; <xref ref-type="bibr" rid="ref50">Takahashi, 2005</xref>, <xref ref-type="bibr" rid="ref51">2006</xref>).</p>
<p>The Weber-Fechner-Laws have also been evaluated in the domain of bodily perception. Interestingly, studies investigating self-motion perception found that the relationship between stimulus intensity and change perception was better described by power laws than the Weber-Fechner-Laws (<xref ref-type="bibr" rid="ref33">Mallery et al., 2010</xref>; <xref ref-type="bibr" rid="ref39">Nesti et al., 2015</xref>; <xref ref-type="bibr" rid="ref41">Nouri and Karmali, 2018</xref>). <xref ref-type="bibr" rid="ref11">Carriot et al. (2021)</xref> found improved discrimination performance at higher stimulus levels (of self-motion), rather than nonlinearities predicted by the Weber-Fechner-Laws. Furthermore, <xref ref-type="bibr" rid="ref20">Ganel et al. (2008)</xref> showed that just noticeable differences depended on object size only in perceptual tasks, not in visually guided grasping tasks&#x2014;concluding that the visual coding for action does not follow the Weber-Fechner-Laws. Thus, it is possible that the processing of stimuli related to bodily action and self-motion may follow different perceptual laws than the perception of external stimuli (<xref ref-type="bibr" rid="ref11">Carriot et al., 2021</xref>). One key question that, to our knowledge, has not yet been addressed is whether the Weber-Fechner Laws hold for the perception of visuomotor delays.</p>
<p>The flexible control of bodily action by the brain relies on the capacity to adapt internal models for motor control to novel sensory movement feedback (<xref ref-type="bibr" rid="ref30">Limanowski, 2022</xref>; <xref ref-type="bibr" rid="ref49">Shadmehr et al., 2010</xref>; <xref ref-type="bibr" rid="ref55">Wolpert and Kawato, 1998</xref>). Sensorimotor adaptation is necessary in cases where, e.g., visual movement feedback is delayed, i.e., a temporal incongruence is introduced between movements and their visual feedback (<xref ref-type="bibr" rid="ref19">Foulkes and Miall, 2000</xref>; <xref ref-type="bibr" rid="ref46">Rohde and Ernst, 2016</xref>). Sensorimotor delays result &#x2018;naturally&#x2019; from conduction delays within our nervous system, thus posing a problem for the brain, particularly regarding the on-line control of action using (delayed) sensory feedback. It is generally agreed upon that the brain overcomes such delays through internal forward modeling of the sensory movement consequences (<xref ref-type="bibr" rid="ref37">Miall and Wolpert, 1996</xref>; <xref ref-type="bibr" rid="ref52">Todorov and Jordan, 2002</xref>). But sensorimotor delays are also increasingly encountered in new technologies enabling cyber-physical interactions, such as virtual reality or robotics (<xref ref-type="bibr" rid="ref43">Peters et al., 2018</xref>; <xref ref-type="bibr" rid="ref48">Schulze et al., 2024</xref>). In most of these scenarios, sensorimotor delays are unavoidable (e.g., due to limitations in bandwidth or computing power) and, more importantly, they can vary unpredictably (<xref ref-type="bibr" rid="ref44">Planthaber et al., 2018</xref>).</p>
<p>Many studies have investigated the perceptual thresholds for detecting visuomotor delays, the capacity to adapt to them, and the neuronal underpinnings of these processes (<xref ref-type="bibr" rid="ref17">Farrer et al., 2008</xref>; <xref ref-type="bibr" rid="ref19">Foulkes and Miall, 2000</xref>; <xref ref-type="bibr" rid="ref27">Krugwasser et al., 2019</xref>; <xref ref-type="bibr" rid="ref28">Leube et al., 2003</xref>; <xref ref-type="bibr" rid="ref29">Limanowski et al., 2017</xref>; <xref ref-type="bibr" rid="ref36">Miall et al., 1985</xref>; <xref ref-type="bibr" rid="ref35">Miall and Jackson, 2006</xref>; <xref ref-type="bibr" rid="ref42">Parvin et al., 2024</xref>; <xref ref-type="bibr" rid="ref46">Rohde and Ernst, 2016</xref>; <xref ref-type="bibr" rid="ref53">Tsakiris et al., 2006</xref>). However, to our knowledge, it has not yet been addressed whether the perceptual sensitivity to visuomotor delay changes depends on the amount of delay present during movement, as predicted by the Weber-Fechner-Laws.</p>
<p>Therefore, we addressed this question through a virtual reality based hand movement task: Participants performed continuous right-hand grasping movements, paced by an auditory rhythm. Their movements were fed to a photorealistic virtual hand model presented on screen, with an experimentally added delay that changed in a roving oddball fashion (<xref ref-type="fig" rid="fig1">Figure 1</xref>). Participants had to report any perceived delay changes (i.e., changes in the mapping of their real hand movements to those executed by the virtual hand) by key presses, as fast as possible throughout the movement task. We expected larger delay changes to be detected more accurately and faster. Furthermore, following the Weber-Fechner Laws, we expected that participants would detect delays better and quicker if they were moving under relatively smaller delays.</p>
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption>
<p>Experimental task and design. Participants controlled the movements of a photorealistic virtual hand presented on screen <bold>(A)</bold> via their hand movements measured by a data glove worn on their right hand (occluded from view, <bold>B</bold>). <bold>(C)</bold> Participants had to execute continuous grasping (open-and-close) movements paced by an auditory cue, a tone changing volume with a frequency of 0.5&#x202F;Hz (i.e., periodically growing louder and quieter). Flexion data from the glove were fed to the virtual hand model, which displayed the executed finger movements. During the task, we added various amounts of delay to the virtual hand movements. This altered the visuomotor mapping; i.e., the temporal congruence between the participants&#x2019; executed hand movements and the movements of the virtual hand (i.e., the visual movement feedback). The schematic plot shows the real hand closing and opening following the auditory cue&#x2019;s volume, with a delay added to the virtual hand movements. <bold>(D)</bold> During the continuous open-and-close movement task, we repeatedly changed the amount of visual feedback delay in a roving oddball design. This plot shows an example sequence of delays between 0 and 700 ms as used in the experiment (i.e., one run of 4.9&#x202F;min&#x202F;=&#x202F;147 open-and-close movement cycles &#x00E0; 2&#x202F;s each). Participants had to report any perceived changes in the visuomotor delay by pressing a button with their left hand as fast as possible (see <bold>B</bold>).</p>
</caption>
<graphic xlink:href="fnhum-18-1495592-g001.tif"/>
</fig>
</sec>
<sec sec-type="methods" id="sec2">
<label>2</label>
<title>Methods</title>
<sec id="sec3">
<label>2.1</label>
<title>Participants</title>
<p>We recruited 40 healthy, right-handed participants, with normal or corrected-to-normal vision (15 female, mean age&#x202F;=&#x202F;28.1, range&#x202F;=&#x202F;22&#x2013;38). We chose our sample size roughly following previous work investigating sensorimotor conflicts with continuous movement paradigms (<xref ref-type="bibr" rid="ref12">Charalampaki et al., 2024</xref>; <xref ref-type="bibr" rid="ref14">Dewey, 2023</xref>; <xref ref-type="bibr" rid="ref31">Limanowski and Friston, 2020</xref>; <xref ref-type="bibr" rid="ref23">Haering and Kiesel, 2016</xref>; <xref ref-type="bibr" rid="ref1">Asai, 2015</xref>; <xref ref-type="bibr" rid="ref47">Salomon et al., 2013</xref>). Participants were recruited through undergraduate psychology lectures and seminars and social media. We excluded three participants (one because the data glove could not be adequately fitted and calibrated; one participant was not able to perceive the changes in volume of the tracking sound; and one did not perceive any delay of the virtual hand, even for a delay of 700&#x202F;ms), leaving us with 37 participants whose data could be analyzed. The experiment was approved by the ethics committee of the Technische Universit&#x00E4;t Dresden and conducted in accordance with this approval.</p>
</sec>
<sec id="sec4">
<label>2.2</label>
<title>Experimental procedure</title>
<p>First, participants were briefed about the experimental procedure and completed a demographic questionnaire. Then they put on a sanitary glove (for hygienic reasons) and the data glove on their right hand (5Dt Data Glove Ultra, 14 sensors, 10 bit flexure resolution per sensor, 60&#x202F;Hz sampling rate, communication with the PC via USB). The data glove allowed our participants to control a photorealistic virtual hand (<xref ref-type="fig" rid="fig1">Figure 1B</xref>; modelled in blender 2.79, <ext-link xlink:href="https://www.blender.org/" ext-link-type="uri">https://www.blender.org/</ext-link>), by measuring the flexion of the participants&#x2019; fingers and transmitting these values to the virtual hand model&#x2019;s fingers (<xref ref-type="fig" rid="fig1">Figures 1A</xref>&#x2013;<xref ref-type="fig" rid="fig1">C</xref>). The virtual hand was shown on a monitor in front of the participant (1,920&#x202F;&#x00D7;&#x202F;1,080 pixels resolution, screen distance to eye about 90&#x202F;cm, 18.92&#x00B0; of visual angle, 13&#x202F;ms Motion Picture Response Time). Participants placed their right hand on their leg, with the palm facing upward, in a position that allowed for comfortable opening and closing of the hand (see <xref ref-type="fig" rid="fig1">Figure 1A</xref>). Their right hand was occluded from view by a barber coat. The data glove was then calibrated; i.e., the virtual fingers&#x2019; movement range was dynamically adjusted to the sensor values until it plausibly displayed the movements of the real hand.</p>
<p>Participants were instructed to fixate a dot presented centrally on screen, and visible at all times, for the entirety of the experiment (<xref ref-type="fig" rid="fig1">Figure 1B</xref>). The movement task was to execute simple, continuous grasping movements: Participants opened and closed their right hand (starting from a closed position) paced by a rhythmic auditory volume cue; i.e., a 250 Hz tone that grew louder and quieter following a sine wave function with 0.5&#x202F;Hz frequency. When the tone was loudest, the physical hand of the participant was supposed to be open, when it was quietest the hand was supposed to be closed (<xref ref-type="fig" rid="fig1">Figure 1C</xref>).</p>
<p>Before the actual experiment, participants practiced to align their real hand grasping movement with the auditory rhythm (without added delay to the virtual hand) until they felt confident that they could follow the auditory cue with their hand movements continually throughout the main experiment.</p>
<p>In the first part of the experiment, we tested for potential differences in the perceived difficulty of the hand movement task itself across the different delay levels used in the main experiment. Participants had to perform 10 auditory paced grasping movements (20&#x202F;s) while fixating the dot in front of the virtual hand, at each delay level used in the main experiment&#x2014;and rate the task difficulty (see below). I.e., the virtual hand moved either synchronously (0&#x202F;ms delay) or was delayed by 100, 200, 300, 400, 500, 600, or 700&#x202F;ms. We chose relatively long delays for our experiment compared with previous single-trial delay detection tasks (<xref ref-type="bibr" rid="ref28">Leube et al., 2003</xref>; <xref ref-type="bibr" rid="ref29">Limanowski et al., 2017</xref>; <xref ref-type="bibr" rid="ref32">Limanowski et al., 2020</xref>), because our pilot experiments indicated that delay perception was more difficult with our task setup, likely because it required simultaneously maintaining a continuous synchronization of the movements with the pacing rhythm. Each delay level was presented three times, in randomized order, resulting in 24 trials in total. After each trial, we asked our participants to indicate how difficult they felt it was to follow the auditory cue, in comparison to the synchronous practice trial, on a 5-point Likert scale from &#x201C;very easy&#x201D; to &#x201C;very difficult.&#x201D;</p>
<p>In the second, main part of the experiment, participants also executed continuous, paced grasping movements&#x2014;now with an added delay to the virtual hand movements that changed repeatedly, following a roving oddball paradigm (<xref ref-type="fig" rid="fig1">Figure 1D</xref>). Participants put their left index finger on the space bar of a computer keyboard, and were instructed to press it as fast as possible every time they detected a change in the visual movement delay (i.e., a change in the mapping of the virtual hand movements to their actually executed movements). Participants were specifically instructed and trained to maintain a stable grasping rhythm with their real hand; i.e., to not alter their movements in response to the delays or delay changes.</p>
<p>During the experiment, each of 8 &#x201C;baseline&#x201D; delay levels (ranging from 0&#x202F;ms, i.e., synchronous, to 700&#x202F;ms in steps of 100&#x202F;ms, see above) was presented for 3 to 6 movement cycles (i.e., 6&#x2013;12&#x202F;s), before changing to a larger or smaller delay. The delays changed between &#x00B1;100 and 400&#x202F;ms (in steps of 100&#x202F;ms), whereby the changes were introduced gradually; i.e., over the course of 0.5&#x202F;s to prevent sudden jumps in the visual movement. The delays were presented in runs of 147 movements (4.9&#x202F;min), each of which contained a predefined, pseudorandomized sequence of 32 delay changes. Thus, we ensured that every run contained 8 delays with each length (3, 4, 5, and 6 movement cycles); and that, within each run, each baseline delay level was presented once in combination with each <italic>absolute</italic> delay change (i.e., &#x00B1;100&#x2013;400&#x202F;ms; 8 delays&#x202F;&#x00D7;&#x202F;4 changes&#x202F;=&#x202F;32 delay changes). We focused our analysis on the absolute magnitude of delay changes (&#x00B1;100&#x2013;400&#x202F;ms) because we could not systematically compare <italic>relative</italic>, i.e., directional delay changes (in- vs. decreases) at all levels. In other words, due to the nature of the design, it was not possible to include all combinations of baseline delay and delay changes in- <italic>and</italic> decreases. For instance, at 700&#x202F;ms, the delay could only decrease, not increase (i.e., there was no change of 700&#x202F;+&#x202F;100&#x202F;ms, only 700&#x202F;&#x2212;&#x202F;100&#x202F;ms). As a control analysis, we tested for potential differences between delay in- vs. decreases with a repeated measures ANOVA with the factors delay change (100&#x2013;400) and direction of change (in- vs. decrease) on the detection scores; which showed that detection was not significantly different between delay in- vs. 
decreases [<italic>F</italic>(3,108)&#x202F;=&#x202F;0.09, <italic>p</italic>&#x202F;=&#x202F;.77]. An example of a roving oddball delay sequence can be seen in <xref ref-type="fig" rid="fig1">Figure 1D</xref>. Overall, eight such sequences were generated, and presented to each participant in randomized order; resulting in 256 delay changes per participant in total. After each run, participants could take a break until they felt ready for the next one.</p>
</sec>
<sec id="sec5">
<label>2.3</label>
<title>Data analysis</title>
<p>To analyze delay change detection performance, we defined a correct delay detection (&#x201C;hit&#x201D;) as a button press within two movement cycles (4&#x202F;s) of a delay change. We chose to include the second movement cycle, because each delay change was gradually introduced (see above), leaving only a partial movement cycle for &#x201C;detection&#x201D; otherwise. For consistency, we also tested whether our results could be replicated with detection windows of one and three movement cycles. Button presses outside of this window were categorized as false alarms. If multiple button presses were recorded within 250&#x202F;ms of a hit or a false alarm [which we attributed to accidental double presses (<xref ref-type="bibr" rid="ref25">Jain et al., 2015</xref>)], they were counted as a single button press.</p>
<p>To quantify the overall delay change detection accuracy, we first calculated the <italic>False Alarm Ratio</italic> as the number of false alarms per number of total alarms (i.e., button presses; <xref ref-type="bibr" rid="ref5">Barnes et al., 2007</xref>). Note that this is different from the often used False Alarm <italic>Rate</italic> (also called False Positive Rate), which is defined as the number of false alarms per total number of &#x201C;non-events.&#x201D; In our case, we could not clearly define &#x201C;non-events,&#x201D; as this would apply to any time point not corresponding to a delay change, i.e., to most of the actual movement period.</p>
<p>Then, we calculated a &#x201C;Detection Score&#x201D; as a more conservative index of detection performance, punishing the percentage of hits by the False Alarm Ratio. Thus, participants who were more hesitant and thus more accurate, but had lower hits overall, would still receive an adequate score as compared to participants that hit the button at a near random rate. We calculated the detection score as follows:</p>
<disp-formula id="E1">
<mml:math id="M1">
<mml:mi mathvariant="italic">detection</mml:mi>
<mml:mspace width="0.25em"/>
<mml:mi mathvariant="italic">score</mml:mi>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi mathvariant="italic">Percentage</mml:mi>
<mml:mspace width="0.25em"/>
<mml:mi mathvariant="italic">of</mml:mi>
<mml:mspace width="0.25em"/>
<mml:mi mathvariant="italic">Hits</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mn>1</mml:mn>
<mml:mo>+</mml:mo>
<mml:mi mathvariant="italic">FAR</mml:mi>
</mml:mrow>
</mml:mfrac>
</mml:math>
</disp-formula>
<p>Detection scores were calculated for each of the 32 delay changes (see above). We then inserted the detection scores into a 4 &#x00D7; 8 repeated measures ANOVA, with the factors <italic>Delay Change</italic> (100, 200, 300, and 400&#x202F;ms) and <italic>Baseline Delay</italic> (0, 100, 200, 300, 400, 500, 600, and 700&#x202F;ms). To test for non-normal properties, we used the Shapiro-Wilk test. Since our data showed non-normal properties, we used an aligned ranks transformation ANOVA for our analysis. The degrees of freedom were calculated using the Kenward-Roger approximation. We also tested for outliers, defined as values of more than three times the interquartile range above the third quartile or below the first quartile; no such extreme values were found. Furthermore, we tested for sphericity using Mauchly&#x2019;s Test. Since sphericity may be violated for delays, we used Greenhouse&#x2013;Geisser corrections for our results. When determining the significance of our results, we corrected for multiple comparisons using the Holm-Bonferroni method. Post-hoc, we used a linear regression model (method of least squares) to further evaluate the main effects; i.e., in terms of consistency and directionality.</p>
<p>As an index of tracking performance, we calculated the auditory cue tracking error as the averaged absolute phase shift in time between the normalized mean movement of the participant and the target movement. To test whether tracking performance (i.e., how well participants were able to maintain an accurate and stable grasping rhythm) was influenced by detection performance, we calculated Spearman&#x2019;s rank correlation coefficients for tracking errors vs. the detection scores and vs. the reaction times, respectively, averaged over delay levels.</p>
</sec>
</sec>
<sec sec-type="results" id="sec6">
<label>3</label>
<title>Results</title>
<p>A repeated measures aligned ranks transformation ANOVA on the difficulty ratings given in the first part of the experiment (i.e., the reported difficulty of tracking the auditory target phase with the grasping movements) showed no significant effect of delay on tracking difficulty [<italic>F</italic><sub>(7,885)</sub>&#x202F;=&#x202F;0.64, <italic>p</italic>&#x202F;=&#x202F;.72, for the average difficulty ratings of each delay level see <xref ref-type="table" rid="tab1">Table 1</xref>]. The difficulty ratings for all delay levels were unaffected by order effects (paired t-tests on ratings given in the first vs. last third of the rating blocks, all <italic>n.s.</italic>). Together, this means that participants found the paced movement task itself comparably difficult across all delay levels.</p>
<table-wrap position="float" id="tab1">
<label>Table 1</label>
<caption>
<p>Means and standard deviations for difficulty ratings, tracking error, detection score and reaction times, for all delay levels.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th rowspan="2"/>
<th align="center" valign="top" colspan="8">Delay level</th>
</tr>
<tr>
<th align="center" valign="top">0&#x202F;ms</th>
<th align="center" valign="top">100&#x202F;ms</th>
<th align="center" valign="top">200&#x202F;ms</th>
<th align="center" valign="top">300&#x202F;ms</th>
<th align="center" valign="top">400&#x202F;ms</th>
<th align="center" valign="top">500&#x202F;ms</th>
<th align="center" valign="top">600&#x202F;ms</th>
<th align="center" valign="top">700&#x202F;ms</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Difficulty rating [1&#x2013;5, a.u.]</td>
<td align="center" valign="top">2.58&#x202F;&#x00B1;&#x202F;1.15</td>
<td align="center" valign="top">2.51&#x202F;&#x00B1;&#x202F;1.20</td>
<td align="center" valign="top">2.44&#x202F;&#x00B1;&#x202F;1.08</td>
<td align="center" valign="top">2.51&#x202F;&#x00B1;&#x202F;1.17</td>
<td align="center" valign="top">2.53&#x202F;&#x00B1;&#x202F;1.01</td>
<td align="center" valign="top">2.33&#x202F;&#x00B1;&#x202F;1.04</td>
<td align="center" valign="top">2.50&#x202F;&#x00B1;&#x202F;1.17</td>
<td align="center" valign="top">2.59&#x202F;&#x00B1;&#x202F;1.12</td>
</tr>
<tr>
<td align="left" valign="top">Tracking error [s]</td>
<td align="center" valign="top">0.23&#x202F;&#x00B1;&#x202F;0.17</td>
<td align="center" valign="top">0.22&#x202F;&#x00B1;&#x202F;0.18</td>
<td align="center" valign="top">0.24&#x202F;&#x00B1;&#x202F;0.18</td>
<td align="center" valign="top">0.23&#x202F;&#x00B1;&#x202F;0.18</td>
<td align="center" valign="top">0.23&#x202F;&#x00B1;&#x202F;0.18</td>
<td align="center" valign="top">0.27&#x202F;&#x00B1;&#x202F;0.18</td>
<td align="center" valign="top">0.27&#x202F;&#x00B1;&#x202F;0.18</td>
<td align="center" valign="top">0.24&#x202F;&#x00B1;&#x202F;0.18</td>
</tr>
<tr>
<td align="left" valign="top">Detection score [a.u.]</td>
<td align="center" valign="top">0.08&#x202F;&#x00B1;&#x202F;0.04</td>
<td align="center" valign="top">0.1&#x202F;&#x00B1;&#x202F;0.03</td>
<td align="center" valign="top">0.12&#x202F;&#x00B1;&#x202F;0.04</td>
<td align="center" valign="top">0.12&#x202F;&#x00B1;&#x202F;0.06</td>
<td align="center" valign="top">0.17&#x202F;&#x00B1;&#x202F;0.08</td>
<td align="center" valign="top">0.14&#x202F;&#x00B1;&#x202F;0.02</td>
<td align="center" valign="top">0.16&#x202F;&#x00B1;&#x202F;0.05</td>
<td align="center" valign="top">0.17&#x202F;&#x00B1;&#x202F;0.04</td>
</tr>
<tr>
<td align="left" valign="top">Reaction time [s]</td>
<td align="center" valign="top">2.14&#x202F;&#x00B1;&#x202F;0.18</td>
<td align="center" valign="top">2.04&#x202F;&#x00B1;&#x202F;0.19</td>
<td align="center" valign="top">2.11&#x202F;&#x00B1;&#x202F;0.27</td>
<td align="center" valign="top">2.06&#x202F;&#x00B1;&#x202F;0.20</td>
<td align="center" valign="top">2.02&#x202F;&#x00B1;&#x202F;0.14</td>
<td align="center" valign="top">1.97&#x202F;&#x00B1;&#x202F;0.30</td>
<td align="center" valign="top">1.82&#x202F;&#x00B1;&#x202F;0.20</td>
<td align="center" valign="top">1.85&#x202F;&#x00B1;&#x202F;0.30</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>In the main experiment, participants were able to maintain regular grasping and tracked the auditory cue well and comparably across delay levels (<xref ref-type="fig" rid="fig2">Figure 2</xref>). On average, they slightly led the phase of the auditory cue, with an average absolute tracking error of 0.24&#x202F;&#x00B1;&#x202F;0.15&#x202F;s. Importantly, however, tracking error did not significantly differ between delay levels [<italic>F</italic><sub>(7,295)</sub>&#x202F;=&#x202F;0.23, <italic>p</italic>&#x202F;=&#x202F;.98, see <xref ref-type="table" rid="tab1">Table 1</xref>]. Together, these results suggest that our participants were well able to comply with the task instructions, and that the amount of delay of the virtual hand itself did not significantly influence tracking (grasping) performance, nor the perceived task difficulty.</p>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption>
<p>Participants&#x2019; average hand movements per delay level (with standard deviations in gray shading), relative to the instructed rhythm prescribed by the auditory cue. Participants tracked the auditory target rhythm comparably well across all delay levels, albeit overall leading slightly (see Results for details). Thus, visual delay per se did not bias grasping or tracking performance.</p>
</caption>
<graphic xlink:href="fnhum-18-1495592-g002.tif"/>
</fig>
<p>To validate the overall delay change detection accuracy of our participants, we first tested it against the chance level. The average False Alarm Ratio of our participants was 0.45&#x202F;&#x00B1;&#x202F;0.10, which was significantly lower than the ratio that would have resulted from random button pressing (i.e., &#x003E;0.67; t<sub>(36)</sub>&#x202F;=&#x202F;13.13, <italic>p</italic>&#x202F;&#x003C;&#x202F;.001). This suggests that, overall, the participants were able to notice the delay changes and pressed the button accordingly.</p>
<p>An aligned ranks transformation ANOVA on the detection scores showed a significant main effect of Delay Change; i.e., the magnitude of the change in the delay of the visual hand movement (<italic>F</italic><sub>(3,1,116)</sub>&#x202F;=&#x202F;19.48, <italic>p</italic>&#x202F;&#x003C;&#x202F;.001, &#x03B7;<sup>2</sup>&#x202F;=&#x202F;.05). A post-hoc linear regression showed that these effects could be linearly approximated (<xref ref-type="fig" rid="fig3">Figure 3A</xref>, R<sup>2</sup>&#x202F;=&#x202F;.07, Slope&#x202F;=&#x202F;2.58&#x2219;10<sup>&#x2212;4</sup>, df&#x202F;=&#x202F;35, <italic>p</italic>&#x202F;&#x003C;&#x202F;.05; for average detection scores see <xref ref-type="table" rid="tab2">Table 2</xref>). However, there was no significant effect of Delay Change on the corresponding reaction times (<xref ref-type="fig" rid="fig3">Figure 3B</xref>, for means and standard deviations see <xref ref-type="table" rid="tab2">Table 2</xref>), while the interaction effect was significant [<italic>F</italic><sub>(21,1,116)</sub>&#x202F;=&#x202F;3.505, <italic>p</italic>&#x202F;&#x003C;&#x202F;.001, &#x03B7;<sup>2</sup>&#x202F;=&#x202F;.06]. In sum, as expected, larger delay changes were consistently detected better (albeit not significantly faster) than smaller ones.</p>
<fig position="float" id="fig3">
<label>Figure 3</label>
<caption>
<p>Accuracy and speed of detecting changes in visual movement feedback delay during continuous grasping. The left plots show the average detection scores <bold>(A)</bold> and reaction times <bold>(B)</bold> for each of the 32 combinations of delay change magnitude and baseline delay. Detection accuracy as quantified by the Detection Score was significantly affected by the amount of delay present during movement (main effect Baseline Delay, <italic>p</italic>&#x202F;&#x003C;&#x202F;.001) and the magnitude of the change in delay (main effect Delay Change, <italic>p</italic>&#x202F;&#x003C;&#x202F;.001). Reaction times were significantly affected by Baseline Delay (main effect, <italic>p</italic>&#x202F;&#x003C;&#x202F;.001). The right plots show post-hoc linear regression analyses (see Results for details). Overall, these analyses showed that larger delay changes were detected significantly more accurately (and slightly but not significantly faster) than smaller changes. Furthermore, change detection was significantly better and faster when participants were moving at larger, compared to smaller, delays.</p>
</caption>
<graphic xlink:href="fnhum-18-1495592-g003.tif"/>
</fig>
<table-wrap position="float" id="tab2">
<label>Table 2</label>
<caption>
<p>Means and standard deviations for detection scores and reaction times, for the four different absolute magnitudes of delay change, respectively.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th rowspan="2"/>
<th align="center" valign="top" colspan="4">Delay change (absolute)</th>
</tr>
<tr>
<th align="center" valign="top">100&#x202F;ms</th>
<th align="center" valign="top">200&#x202F;ms</th>
<th align="center" valign="top">300&#x202F;ms</th>
<th align="center" valign="top">400&#x202F;ms</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Detection score [a.u.]</td>
<td align="center" valign="top">0.10&#x202F;&#x00B1;&#x202F;0.03</td>
<td align="center" valign="top">0.11&#x202F;&#x00B1;&#x202F;0.03</td>
<td align="center" valign="top">0.15&#x202F;&#x00B1;&#x202F;0.06</td>
<td align="center" valign="top">0.16&#x202F;&#x00B1;&#x202F;0.04</td>
</tr>
<tr>
<td align="left" valign="top">Reaction time [s]</td>
<td align="center" valign="top">2.07&#x202F;&#x00B1;&#x202F;1.23</td>
<td align="center" valign="top">1.81&#x202F;&#x00B1;&#x202F;0.84</td>
<td align="center" valign="top">2.04&#x202F;&#x00B1;&#x202F;0.76</td>
<td align="center" valign="top">2.01&#x202F;&#x00B1;&#x202F;0.77</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Secondly, there was a significant main effect of Baseline Delay on detection scores [<italic>F</italic><sub>(7,1,116)</sub>&#x202F;=&#x202F;11.76, <italic>p</italic>&#x202F;&#x003C;&#x202F;.001, &#x03B7;<sup>2</sup>&#x202F;=&#x202F;.07] and reaction times [<italic>F</italic><sub>(7,1,116)</sub>&#x202F;=&#x202F;6.56, <italic>p</italic>&#x202F;&#x003C;&#x202F;.001, &#x03B7;<sup>2</sup>&#x202F;=&#x202F;.04]; as well as a significant interaction between Baseline Delay and Delay Change [<italic>F</italic><sub>(21,1,116)</sub>&#x202F;=&#x202F;2.09, <italic>p</italic>&#x202F;&#x003C;&#x202F;.05, &#x03B7;<sup>2</sup>&#x202F;=&#x202F;.04, <xref ref-type="fig" rid="fig3">Figure 3B</xref>]. Post-hoc linear regression analyses showed, again, that these effects could be linearly approximated (detection scores, <xref ref-type="fig" rid="fig3">Figure 3A</xref>: R<sup>2</sup>&#x202F;=&#x202F;.06, Slope&#x202F;=&#x202F;1.18&#x2219;10<sup>&#x2212;4</sup>, df&#x202F;=&#x202F;35, <italic>p</italic>&#x202F;&#x003C;&#x202F;.001; reaction times, <xref ref-type="fig" rid="fig3">Figure 3B</xref>: R<sup>2</sup>&#x202F;=&#x202F;.02, Slope&#x202F;=&#x202F;&#x2212;4.05&#x2219;10<sup>&#x2212;4</sup>, df&#x202F;=&#x202F;35, <italic>p</italic>&#x202F;&#x003C;&#x202F;.05, for means see <xref ref-type="table" rid="tab1">Table 1</xref>). In sum, delay change detection was overall better and faster at larger&#x202F;&#x003E;&#x202F;smaller delays; i.e., the more the visual movement feedback was delayed during movement when the change occurred.</p>
<p>The above significant main and interaction effects on detection performance were also significant when limiting the analysis to a detection window of only the first movement cycle (all <italic>F</italic>s&#x202F;&#x003E;&#x202F;2.2, all <italic>p</italic>s&#x202F;&#x003C;&#x202F;0.01) or the first three movement cycles (all <italic>F</italic>s&#x202F;&#x003E;&#x202F;4.3, all <italic>p</italic>s&#x202F;&#x003C;&#x202F;0.001). The significant main effect of Baseline Delay on the reaction times, and the interaction effect, were also replicated when limiting the analysis to the first movement cycle (all <italic>F</italic>s&#x202F;&#x003E;&#x202F;3.54, all <italic>p</italic>s&#x202F;&#x003C;&#x202F;0.001) or the first three movement cycles (all <italic>F</italic>s&#x202F;&#x003E;&#x202F;1.55, albeit not reaching statistical significance, <italic>p</italic>s&#x202F;&#x003C;&#x202F;0.06). The slopes of the respective linear regressions confirmed the directionality of the effects as in our main analysis. In short, the width of our detection window did not bias our results.</p>
<p>Finally, we tested whether the participants&#x2019; average detection performance covaried with how well they were able to maintain stable tracking. With this, we aimed to detect a possible relationship between delay detection and tracking error despite the fact that tracking performance on average was comparable across delay levels (cf. <xref ref-type="fig" rid="fig2">Figure 2</xref>). This analysis revealed a significant negative correlation between detection accuracy and tracking performance: Participants who tracked the target rhythm overall better (i.e., had a smaller average tracking error) detected fewer of the delay changes (<italic>&#x03C1;</italic>&#x202F;=&#x202F;.40, <italic>p</italic>&#x202F;&#x003C;&#x202F;.05, <xref ref-type="fig" rid="fig4">Figure 4</xref>). The correlation between reaction times and tracking error showed a similar directionality, but was not significant (smaller tracking error&#x202F;~&#x202F;longer reaction times to delay changes; <italic>&#x03C1;</italic>&#x202F;=&#x202F;&#x2212;.05, <italic>p</italic>&#x202F;=&#x202F;.78).</p>
<fig position="float" id="fig4">
<label>Figure 4</label>
<caption>
<p>Spearman correlation between detection score and tracking error of the individual participants. Participants with better detection scores had a larger tracking error; i.e., they followed the auditory target cue worse with their grasps.</p>
</caption>
<graphic xlink:href="fnhum-18-1495592-g004.tif"/>
</fig>
</sec>
<sec sec-type="discussion" id="sec7">
<label>4</label>
<title>Discussion</title>
<p>Using a virtual reality based continuous hand movement task, we tested whether the detection of changes in visuomotor delay depended on the amount of delay currently present during movement. As expected, we found that larger delay changes were detected significantly better than smaller ones. Somewhat surprisingly, however, delay changes were detected significantly better and faster when participants moved under larger &#x003E; smaller delays. In other words, our results suggest a higher sensitivity to changes in visual movement feedback delay when there already is a substantial amount of delay present during movement. This seems to indicate that the Weber-Fechner Laws do not apply to the perception of (temporal) visuomotor mapping; as they would predict the opposite; i.e., better sensitivity to delay changes under smaller delays.</p>
<p>Results inconsistent with the Weber-Fechner Laws have been reported in other domains before, where they have been interpreted as indicating, for instance, saturation effects (<xref ref-type="bibr" rid="ref2">Augustin, 2008</xref>; <xref ref-type="bibr" rid="ref3">Augustin and Tanja, 2008</xref>; <xref ref-type="bibr" rid="ref11">Carriot et al., 2021</xref>; <xref ref-type="bibr" rid="ref15">Doble et al., 2003</xref>). In our case, we propose two complementary explanations for our results:</p>
<p>Firstly, it is well established that increasing visuomotor delay reliably reduces the self-attribution of the observed movements (also called a loss of control or &#x201C;agency&#x201D;) over those movements (<xref ref-type="bibr" rid="ref27">Krugwasser et al., 2019</xref>; <xref ref-type="bibr" rid="ref54">Wen and Imamizu, 2022</xref>) and the subjective embodiment of the seen moving body part (also called a loss of &#x201C;ownership&#x201D;; see (<xref ref-type="bibr" rid="ref16">Dummer et al., 2009</xref>; <xref ref-type="bibr" rid="ref17">Farrer et al., 2008</xref>; <xref ref-type="bibr" rid="ref28">Leube et al., 2003</xref>; <xref ref-type="bibr" rid="ref53">Tsakiris et al., 2006</xref>). Thus, it is reasonable to assume that, in our experiment, participants experienced a relatively stronger subjective embodiment of the virtual hand under small delays. After our experiment, seven of our participants spontaneously reported having perceived up to half of all virtual movements as synchronous to their own movement (i.e., much more than actually were synchronous)&#x2014;several of those participants reported a feeling of ownership over the virtual hand in those synchronously perceived periods. While these were only spontaneous post-hoc reports, they tentatively support the idea of embodiment biasing delay detection in our task.</p>
<p>Experienced embodiment can substantially affect attentional control and task performance in virtual reality based movement tasks, as shown, e.g., by <xref ref-type="bibr" rid="ref24">Iwasaki et al. (2022)</xref>. In our case, one such effect could be intersensory conflict attenuation. As demonstrated in the &#x2018;rubber hand illusion&#x2019; (<xref ref-type="bibr" rid="ref8">Botvinick and Cohen, 1998</xref>), the embodiment of an alternative (fake or virtual) limb biases the subjective perception of visuoproprioceptive mismatches between real and fake limbs, attenuating intersensory conflict through updates to the corresponding neuronal representations (<xref ref-type="bibr" rid="ref30">Limanowski, 2022</xref>). Since visuomotor delays are, effectively, also visuoproprioceptive mismatches, we speculate that a similar intersensory conflict attenuation could occur at smaller, compared with larger delays. In other words, an increased embodiment of the virtual hand at smaller delay levels could have increased the tolerance for visuomotor mismatches, and thus, impaired delay detection performance. An increased embodiment of the virtual hand under small &#x003E; large delays could also have distorted the perception of movement timing more generally (<xref ref-type="bibr" rid="ref13">Desantis et al., 2016</xref>; <xref ref-type="bibr" rid="ref22">Haering and Kiesel, 2015</xref>; <xref ref-type="bibr" rid="ref46">Rohde and Ernst, 2016</xref>).</p>
<p>A complementary influence on detection performance could have resulted from larger visuomotor delays being more salient stimuli <italic>per se</italic>. This saliency effect could stem from the fact that these delays constitute &#x2018;non-standard&#x2019; visuomotor mappings; i.e., mappings that violate the life-long learned associations (of congruence) between motor commands and visual movement feedback (<xref ref-type="bibr" rid="ref45">Quirmbach and Limanowski, 2024</xref>; <xref ref-type="bibr" rid="ref56">Yon et al., 2018</xref>). Thus, larger delays could have captured more attention&#x2014;onto the visuomotor relationship, or more generally onto the visual movement feedback&#x2014;than smaller delays. This would likely have improved delay detection performance at larger delay levels. Note that this interpretation complements, rather than contradicts, the idea of an embodiment-related increased tolerance for changes at lower delay levels. However, as we did not explicitly assess subjective embodiment or attentional allocation, both of the above interpretations have to be explicitly tested in future experiments.</p>
<p>Finally, note that our task design implied maintaining focus on the real hand movements, to track the auditory rhythm, while the virtual hand moved incongruently. Visual body movements that are incongruent with one&#x2019;s actual movements and movement goals can capture attention and lead to behavioral deficits&#x2014;this is sometimes also referred to as visuomotor interference (<xref ref-type="bibr" rid="ref6">Blakemore and Frith, 2005</xref>; <xref ref-type="bibr" rid="ref10">Brass et al., 2001</xref>; <xref ref-type="bibr" rid="ref26">Kilner et al., 2003</xref>; <xref ref-type="bibr" rid="ref31">Limanowski and Friston, 2020</xref>; <xref ref-type="bibr" rid="ref34">Metral and Guerraz, 2019</xref>). The distracting effect of incongruent visual stimuli may be particularly strong when they depict one&#x2019;s own, delayed movements (<xref ref-type="bibr" rid="ref47">Salomon et al., 2013</xref>). The positive correlation between detection performance and tracking error suggests that participants who used more cognitive-attentional resources to maintain stable tracking lacked those resources for the detection task. The &#x2018;dual task&#x2019; nature of our design could also explain the relatively low overall detection performance we observed, in contrast to other studies using single-movement designs (<xref ref-type="bibr" rid="ref17">Farrer et al., 2008</xref>; <xref ref-type="bibr" rid="ref28">Leube et al., 2003</xref>). Interestingly, tracking performance did not differ between delay levels. This could suggest that our participants resisted or counter-acted any potential biasing effect of visual feedback delay on movement execution (cf. <xref ref-type="bibr" rid="ref31">Limanowski and Friston, 2020</xref>). 
An interesting question (which we, however, cannot answer) is whether an increased embodiment of the virtual hand would lead to stronger visuomotor interference effects (hence expected at smaller delays); or whether larger delays <italic>per se</italic> elicit stronger visuomotor interference effects.</p>
<p>Our study raised further questions that should be followed up by future work. Due to the nature of our design, we could not systematically compare detection and reaction times to delay increases vs. decreases. Although in a control analysis, we did not find any significant differences in detection scores between delay increases vs. decreases (see Methods), this needs to be addressed in detail by future work. Furthermore, it has been shown that, when moving under visuomotor delay, visual movement feedback is processed differently depending on whether vision is currently task-relevant or a distractor (<xref ref-type="bibr" rid="ref32">Limanowski et al., 2020</xref>; <xref ref-type="bibr" rid="ref31">Limanowski and Friston, 2020</xref>). An open question is whether visuomotor delay <italic>changes</italic> are processed differently depending on whether they are task-relevant or -irrelevant. Furthermore, we observed substantial performance differences (in tracking and detection) between participants; future work should look into the possible causes for such inter- but also intrapersonal variability. For instance, one factor influencing overall performance could have been prior experience with VR scenarios, which we did not assess. Visuomotor temporal incongruence can in principle also be implemented by leading, not lagging, vision; in the context of self-initiated hand movements, however, this is associated with a number of problems related to the predictability of the visual trajectories before actual movement. Finally, visual feedback delay is only one kind of visuomotor incongruence; future work should test whether other manipulations, such as spatial offset or gain scaling, produce similar results.</p>
<p>To conclude, we have shown that, during bodily action in virtual reality, the current degree of visuomotor delay can bias the perception of changes in the magnitude of this delay. This bias does not seem to follow the Weber-Fechner Laws. Instead, bodily action under relatively small delays may entail some degree of tolerance for delay changes resulting from intersensory conflict attenuation due to a stronger embodiment of the (relatively more congruently moving) virtual hand; whereas large delays may capture (visual) attention due to their violation of visuomotor predictions. Thus, our results highlight the importance of cognitive-attentional factors in visuomotor processing; and the need to consider these factors when designing cyber-physical interactions with variable delays.</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="sec8">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec sec-type="ethics-statement" id="sec9">
<title>Ethics statement</title>
<p>The studies involving humans were approved by ethics committee of the Technische Universit&#x00E4;t Dresden (SR-EK-174032021). The studies were conducted in accordance with the local legislation and institutional requirements. The participants provided their written informed consent to participate in this study.</p>
</sec>
<sec sec-type="author-contributions" id="sec10">
<title>Author contributions</title>
<p>GV: Data curation, Formal analysis, Investigation, Methodology, Software, Validation, Visualization, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing. JL: Conceptualization, Funding acquisition, Methodology, Project administration, Resources, Software, Supervision, Validation, Writing &#x2013; review &#x0026; editing.</p>
</sec>
<sec sec-type="funding-information" id="sec11">
<title>Funding</title>
<p>The author(s) declare that financial support was received for the research, authorship, and/or publication of this article. This work was funded by the German Research Foundation (DFG, Deutsche Forschungsgemeinschaft) as part of Germany&#x2019;s Excellence Strategy&#x2014;EXC 2050/1&#x2014;Project ID 390696704&#x2014;Cluster of Excellence &#x201C;Centre for Tactile Internet with Human-in-the-Loop&#x201D; (CeTI) of Technische Universit&#x00E4;t Dresden. JL was supported by a Freigeist Fellowship of the VolkswagenStiftung (AZ 97-932).</p>
</sec>
<sec sec-type="COI-statement" id="sec12">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="sec13">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="ref1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Asai</surname> <given-names>T.</given-names></name></person-group> (<year>2015</year>). <article-title>Feedback control of one&#x2019;s own action: self-other sensory attribution in motor control</article-title>. <source>Conscious. Cogn.</source> <volume>38</volume>, <fpage>118</fpage>&#x2013;<lpage>129</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.concog.2015.11.002</pub-id>, PMID: <pub-id pub-id-type="pmid">26587957</pub-id></citation></ref>
<ref id="ref2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Augustin</surname> <given-names>T.</given-names></name></person-group> (<year>2008</year>). <article-title>The parameters in the near-miss-to-Weber&#x2019;s law</article-title>. <source>J. Math. Psychol.</source> <volume>52</volume>, <fpage>37</fpage>&#x2013;<lpage>47</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jmp.2007.11.001</pub-id></citation></ref>
<ref id="ref3"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Augustin</surname> <given-names>T.</given-names></name> <name><surname>Tanja</surname> <given-names>R.</given-names></name></person-group> (<year>2008</year>). Empirical evaluation of the near-miss-to-Weber&#x2019;s law: a visual discrimination experiment. Psychology Science Quarterly, 50.</citation></ref>
<ref id="ref4"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Baird</surname> <given-names>J. C.</given-names></name></person-group> (<year>1997</year>). Sensation and judgment: Complementarity theory of psychophysics. Lawrence Erlbaum Associates. Available at: <ext-link xlink:href="https://search.ebscohost.com/login.aspx?direct=true&#x0026;scope=site&#x0026;db=nlebk&#x0026;db=nlabk&#x0026;AN=707812" ext-link-type="uri">https://search.ebscohost.com/login.aspx?direct=true&#x0026;scope=site&#x0026;db=nlebk&#x0026;db=nlabk&#x0026;AN=707812</ext-link> (accessed April 29, 2024).</citation></ref>
<ref id="ref5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Barnes</surname> <given-names>L. R.</given-names></name> <name><surname>Gruntfest</surname> <given-names>E. C.</given-names></name> <name><surname>Hayden</surname> <given-names>M. H.</given-names></name> <name><surname>Schultz</surname> <given-names>D. M.</given-names></name> <name><surname>Benight</surname> <given-names>C.</given-names></name></person-group> (<year>2007</year>). <article-title>False alarms and close calls: a conceptual model of warning accuracy</article-title>. <source>Weather Forecast.</source> <volume>22</volume>, <fpage>1140</fpage>&#x2013;<lpage>1147</lpage>. doi: <pub-id pub-id-type="doi">10.1175/WAF1031.1</pub-id></citation></ref>
<ref id="ref6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Blakemore</surname> <given-names>S.-J.</given-names></name> <name><surname>Frith</surname> <given-names>C.</given-names></name></person-group> (<year>2005</year>). <article-title>The role of motor contagion in the prediction of action</article-title>. <source>Neuropsychologia</source> <volume>43</volume>, <fpage>260</fpage>&#x2013;<lpage>267</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuropsychologia.2004.11.012</pub-id>, PMID: <pub-id pub-id-type="pmid">15707910</pub-id></citation></ref>
<ref id="ref8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Botvinick</surname> <given-names>M.</given-names></name> <name><surname>Cohen</surname> <given-names>J.</given-names></name></person-group> (<year>1998</year>). <article-title>Rubber hands &#x2018;feel&#x2019; touch that eyes see</article-title>. <source>Nature</source> <volume>391</volume>:<fpage>756</fpage>. doi: <pub-id pub-id-type="doi">10.1038/35784</pub-id>, PMID: <pub-id pub-id-type="pmid">9486643</pub-id></citation></ref>
<ref id="ref9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Brannon</surname> <given-names>E. M.</given-names></name> <name><surname>Libertus</surname> <given-names>M. E.</given-names></name> <name><surname>Meck</surname> <given-names>W. H.</given-names></name> <name><surname>Woldorff</surname> <given-names>M. G.</given-names></name></person-group> (<year>2008</year>). <article-title>Electrophysiological measures of time processing in infant and adult brains: Weber&#x2019;s law holds</article-title>. <source>J. Cogn. Neurosci.</source> <volume>20</volume>, <fpage>193</fpage>&#x2013;<lpage>203</lpage>. doi: <pub-id pub-id-type="doi">10.1162/jocn.2008.20016</pub-id>, PMID: <pub-id pub-id-type="pmid">18275328</pub-id></citation></ref>
<ref id="ref10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Brass</surname> <given-names>M.</given-names></name> <name><surname>Bekkering</surname> <given-names>H.</given-names></name> <name><surname>Prinz</surname> <given-names>W.</given-names></name></person-group> (<year>2001</year>). <article-title>Movement observation affects movement execution in a simple response task</article-title>. <source>Acta Psychol.</source> <volume>106</volume>, <fpage>3</fpage>&#x2013;<lpage>22</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S0001-6918(00)00024-X</pub-id>, PMID: <pub-id pub-id-type="pmid">11256338</pub-id></citation></ref>
<ref id="ref11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Carriot</surname> <given-names>J.</given-names></name> <name><surname>Cullen</surname> <given-names>K.</given-names></name> <name><surname>Chacron</surname> <given-names>M.</given-names></name></person-group> (<year>2021</year>). <article-title>The neural basis for violations of Weber&#x2019;s law in self-motion perception</article-title>. <source>Proc. Natl. Acad. Sci.</source> <volume>118</volume>:<fpage>e2025061118</fpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.2025061118</pub-id>, PMID: <pub-id pub-id-type="pmid">34475203</pub-id></citation></ref>
<ref id="ref12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Charalampaki</surname> <given-names>A.</given-names></name> <name><surname>Ciston</surname> <given-names>A. B.</given-names></name> <name><surname>Filevich</surname> <given-names>E.</given-names></name></person-group> (<year>2024</year>). <article-title>Contributions of tactile information to the sense of agency and its metacognitive representations</article-title>. <source>J. Exp. Psychol. Gen.</source> <volume>153</volume>, <fpage>2427</fpage>&#x2013;<lpage>2440</lpage>. doi: <pub-id pub-id-type="doi">10.1037/xge0001634</pub-id>, PMID: <pub-id pub-id-type="pmid">39115900</pub-id></citation></ref>
<ref id="ref13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Desantis</surname> <given-names>A.</given-names></name> <name><surname>Waszak</surname> <given-names>F.</given-names></name> <name><surname>Moutsopoulou</surname> <given-names>K.</given-names></name> <name><surname>Haggard</surname> <given-names>P.</given-names></name></person-group> (<year>2016</year>). <article-title>How action structures time: about the perceived temporal order of action and predicted outcomes</article-title>. <source>Cognition</source> <volume>146</volume>, <fpage>100</fpage>&#x2013;<lpage>109</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cognition.2015.08.011</pub-id>, PMID: <pub-id pub-id-type="pmid">26409246</pub-id></citation></ref>
<ref id="ref14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dewey</surname> <given-names>J. A.</given-names></name></person-group> (<year>2023</year>). <article-title>Cognitive load decreases the sense of agency during continuous action</article-title>. <source>Acta Psychol.</source> <volume>233</volume>:<fpage>103824</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.actpsy.2022.103824</pub-id>, PMID: <pub-id pub-id-type="pmid">36623472</pub-id></citation></ref>
<ref id="ref15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Doble</surname> <given-names>C. W.</given-names></name> <name><surname>Falmagne</surname> <given-names>J.-C.</given-names></name> <name><surname>Berg</surname> <given-names>B. G.</given-names></name></person-group> (<year>2003</year>). <article-title>Recasting (the near-miss to) Weber&#x2019;s law</article-title>. <source>Psychol. Rev.</source> <volume>110</volume>, <fpage>365</fpage>&#x2013;<lpage>375</lpage>. doi: <pub-id pub-id-type="doi">10.1037/0033-295X.110.2.365</pub-id>, PMID: <pub-id pub-id-type="pmid">12747528</pub-id></citation></ref>
<ref id="ref16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dummer</surname> <given-names>T.</given-names></name> <name><surname>Picot-Annand</surname> <given-names>A.</given-names></name> <name><surname>Neal</surname> <given-names>T.</given-names></name> <name><surname>Moore</surname> <given-names>C.</given-names></name></person-group> (<year>2009</year>). <article-title>Movement and the rubber hand illusion</article-title>. <source>Perception</source> <volume>38</volume>, <fpage>271</fpage>&#x2013;<lpage>280</lpage>. doi: <pub-id pub-id-type="doi">10.1068/p5921</pub-id>, PMID: <pub-id pub-id-type="pmid">19400435</pub-id></citation></ref>
<ref id="ref17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Farrer</surname> <given-names>C.</given-names></name> <name><surname>Frey</surname> <given-names>S. H.</given-names></name> <name><surname>Van Horn</surname> <given-names>J. D.</given-names></name> <name><surname>Tunik</surname> <given-names>E.</given-names></name> <name><surname>Turk</surname> <given-names>D.</given-names></name> <name><surname>Inati</surname> <given-names>S.</given-names></name> <etal/></person-group>. (<year>2008</year>). <article-title>The angular gyrus computes action awareness representations</article-title>. <source>Cereb. Cortex</source> <volume>18</volume>, <fpage>254</fpage>&#x2013;<lpage>261</lpage>. doi: <pub-id pub-id-type="doi">10.1093/cercor/bhm050</pub-id>, PMID: <pub-id pub-id-type="pmid">17490989</pub-id></citation></ref>
<ref id="ref18"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Fechner</surname> <given-names>G. T.</given-names></name> <name><surname>Boring</surname> <given-names>E. G.</given-names></name> <name><surname>Howes</surname> <given-names>D. H.</given-names></name> <name><surname>Adler</surname> <given-names>H. E.</given-names></name></person-group> (<year>1966</year>). Elements of psychophysics. Volume 1/Gustav Fechner; translated by Helmut E. Adler; edited by David H. Howes, Edwin G. Boring; with an introduction by Edwin G. Boring. Holt, Rinehart and Winston, Inc.</citation></ref>
<ref id="ref19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Foulkes</surname> <given-names>A. J.</given-names></name> <name><surname>Miall</surname> <given-names>R. C.</given-names></name></person-group> (<year>2000</year>). <article-title>Adaptation to visual feedback delays in a human manual tracking task</article-title>. <source>Exp. Brain Res.</source> <volume>131</volume>, <fpage>101</fpage>&#x2013;<lpage>110</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s002219900286</pub-id>, PMID: <pub-id pub-id-type="pmid">10759175</pub-id></citation></ref>
<ref id="ref20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ganel</surname> <given-names>T.</given-names></name> <name><surname>Chajut</surname> <given-names>E.</given-names></name> <name><surname>Algom</surname> <given-names>D.</given-names></name></person-group> (<year>2008</year>). <article-title>Visual coding for action violates fundamental psychophysical principles</article-title>. <source>Curr. Biol.</source> <volume>18</volume>, <fpage>R599</fpage>&#x2013;<lpage>R601</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cub.2008.04.052</pub-id>, PMID: <pub-id pub-id-type="pmid">18644333</pub-id></citation></ref>
<ref id="ref21"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Gescheider</surname> <given-names>G. A.</given-names></name></person-group> (<year>1997</year>). <source>Psychophysics: The fundamentals</source> (<publisher-loc>Mahwah, NJ</publisher-loc>: <publisher-name>L. Erlbaum Associates</publisher-name>).</citation></ref>
<ref id="ref22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Haering</surname> <given-names>C.</given-names></name> <name><surname>Kiesel</surname> <given-names>A.</given-names></name></person-group> (<year>2015</year>). <article-title>Was it me when it happened too early? Experience of delayed effects shapes sense of agency</article-title>. <source>Cognition</source> <volume>136</volume>, <fpage>38</fpage>&#x2013;<lpage>42</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cognition.2014.11.012</pub-id>, PMID: <pub-id pub-id-type="pmid">25490127</pub-id></citation></ref>
<ref id="ref23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Haering</surname> <given-names>C.</given-names></name> <name><surname>Kiesel</surname> <given-names>A.</given-names></name></person-group> (<year>2016</year>). <article-title>Time perception and the experience of agency</article-title>. <source>Psychol. Res.</source> <volume>80</volume>, <fpage>286</fpage>&#x2013;<lpage>297</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00426-015-0654-0</pub-id>, PMID: <pub-id pub-id-type="pmid">25749800</pub-id></citation></ref>
<ref id="ref24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Iwasaki</surname> <given-names>Y.</given-names></name> <name><surname>Navarro</surname> <given-names>B.</given-names></name> <name><surname>Iwata</surname> <given-names>H.</given-names></name> <name><surname>Ganesh</surname> <given-names>G.</given-names></name></person-group> (<year>2022</year>). <article-title>Embodiment modifies attention allotment for the benefit of dual task performance</article-title>. <source>Commun. Biol.</source> <volume>5</volume>, <fpage>701</fpage>&#x2013;<lpage>707</lpage>. doi: <pub-id pub-id-type="doi">10.1038/s42003-022-03603-6</pub-id>, PMID: <pub-id pub-id-type="pmid">35835983</pub-id></citation></ref>
<ref id="ref25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jain</surname> <given-names>A.</given-names></name> <name><surname>Bansal</surname> <given-names>R.</given-names></name> <name><surname>Kumar</surname> <given-names>A.</given-names></name> <name><surname>Singh</surname> <given-names>K. D.</given-names></name></person-group> (<year>2015</year>). <article-title>A comparative study of visual and auditory reaction times on the basis of gender and physical activity levels of medical first year students</article-title>. <source>Int. J. Appl. Basic Med. Res.</source> <volume>5</volume>, <fpage>124</fpage>&#x2013;<lpage>127</lpage>. doi: <pub-id pub-id-type="doi">10.4103/2229-516X.157168</pub-id>, PMID: <pub-id pub-id-type="pmid">26097821</pub-id></citation></ref>
<ref id="ref26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kilner</surname> <given-names>J. M.</given-names></name> <name><surname>Paulignan</surname> <given-names>Y.</given-names></name> <name><surname>Blakemore</surname> <given-names>S. J.</given-names></name></person-group> (<year>2003</year>). <article-title>An interference effect of observed biological movement on action</article-title>. <source>Curr. Biol.</source> <volume>13</volume>, <fpage>522</fpage>&#x2013;<lpage>525</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S0960-9822(03)00165-9</pub-id>, PMID: <pub-id pub-id-type="pmid">12646137</pub-id></citation></ref>
<ref id="ref27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Krugwasser</surname> <given-names>A. R.</given-names></name> <name><surname>Harel</surname> <given-names>E. V.</given-names></name> <name><surname>Salomon</surname> <given-names>R.</given-names></name></person-group> (<year>2019</year>). <article-title>The boundaries of the self: the sense of agency across different sensorimotor aspects</article-title>. <source>J. Vis.</source> <volume>19</volume>:<fpage>14</fpage>. doi: <pub-id pub-id-type="doi">10.1167/19.4.14</pub-id>, PMID: <pub-id pub-id-type="pmid">30952165</pub-id></citation></ref>
<ref id="ref28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Leube</surname> <given-names>D. T.</given-names></name> <name><surname>Knoblich</surname> <given-names>G.</given-names></name> <name><surname>Erb</surname> <given-names>M.</given-names></name> <name><surname>Kircher</surname> <given-names>T. T. J.</given-names></name></person-group> (<year>2003</year>). <article-title>Observing one&#x2019;s hand become anarchic: an fMRI study of action identification</article-title>. <source>Conscious. Cogn.</source> <volume>12</volume>, <fpage>597</fpage>&#x2013;<lpage>608</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S1053-8100(03)00079-5</pub-id>, PMID: <pub-id pub-id-type="pmid">14656503</pub-id></citation></ref>
<ref id="ref29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Limanowski</surname> <given-names>J.</given-names></name> <name><surname>Kirilina</surname> <given-names>E.</given-names></name> <name><surname>Blankenburg</surname> <given-names>F.</given-names></name></person-group> (<year>2017</year>). <article-title>Neuronal correlates of continuous manual tracking under varying visual movement feedback in a virtual reality environment</article-title>. <source>NeuroImage</source> <volume>146</volume>, <fpage>81</fpage>&#x2013;<lpage>89</lpage>.</citation></ref>
<ref id="ref30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Limanowski</surname> <given-names>J.</given-names></name></person-group> (<year>2022</year>). <article-title>Precision control for a flexible body representation</article-title>. <source>Neurosci. Biobehav. Rev.</source> <volume>134</volume>:<fpage>104401</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neubiorev.2021.10.023</pub-id>, PMID: <pub-id pub-id-type="pmid">34736884</pub-id></citation></ref>
<ref id="ref31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Limanowski</surname> <given-names>J.</given-names></name> <name><surname>Friston</surname> <given-names>K.</given-names></name></person-group> (<year>2020</year>). <article-title>Attentional modulation of vision versus proprioception during action</article-title>. <source>Cereb. Cortex</source> <volume>30</volume>, <fpage>1637</fpage>&#x2013;<lpage>1648</lpage>. doi: <pub-id pub-id-type="doi">10.1093/cercor/bhz192</pub-id>, PMID: <pub-id pub-id-type="pmid">31670769</pub-id></citation></ref>
<ref id="ref32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Limanowski</surname> <given-names>J.</given-names></name> <name><surname>Litvak</surname> <given-names>V.</given-names></name> <name><surname>Friston</surname> <given-names>K.</given-names></name></person-group> (<year>2020</year>). <article-title>Cortical beta oscillations reflect the contextual gating of visual action feedback</article-title>. <source>NeuroImage</source> <volume>222</volume>:<fpage>117267</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2020.117267</pub-id>, PMID: <pub-id pub-id-type="pmid">32818621</pub-id></citation></ref>
<ref id="ref33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mallery</surname> <given-names>R. M.</given-names></name> <name><surname>Olomu</surname> <given-names>O. U.</given-names></name> <name><surname>Uchanski</surname> <given-names>R. M.</given-names></name> <name><surname>Militchin</surname> <given-names>V. A.</given-names></name> <name><surname>Hullar</surname> <given-names>T. E.</given-names></name></person-group> (<year>2010</year>). <article-title>Human discrimination of rotational velocities</article-title>. <source>Exp. Brain Res.</source> <volume>204</volume>, <fpage>11</fpage>&#x2013;<lpage>20</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00221-010-2288-1</pub-id>, PMID: <pub-id pub-id-type="pmid">20526711</pub-id></citation></ref>
<ref id="ref34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Metral</surname> <given-names>M.</given-names></name> <name><surname>Guerraz</surname> <given-names>M.</given-names></name></person-group> (<year>2019</year>). <article-title>Fake hand in movement: visual motion cues from the rubber hand are processed for kinesthesia</article-title>. <source>Conscious. Cogn.</source> <volume>73</volume>:<fpage>102761</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.concog.2019.05.009</pub-id>, PMID: <pub-id pub-id-type="pmid">31200242</pub-id></citation></ref>
<ref id="ref35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Miall</surname> <given-names>R. C.</given-names></name> <name><surname>Jackson</surname> <given-names>J. K.</given-names></name></person-group> (<year>2006</year>). <article-title>Adaptation to visual feedback delays in manual tracking: evidence against the Smith predictor model of human visually guided action</article-title>. <source>Exp. Brain Res.</source> <volume>172</volume>, <fpage>77</fpage>&#x2013;<lpage>84</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00221-005-0306-5</pub-id>, PMID: <pub-id pub-id-type="pmid">16424978</pub-id></citation></ref>
<ref id="ref36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Miall</surname> <given-names>R. C.</given-names></name> <name><surname>Weir</surname> <given-names>D. J.</given-names></name> <name><surname>Stein</surname> <given-names>J. F.</given-names></name></person-group> (<year>1985</year>). <article-title>Visuomotor tracking with delayed visual feedback</article-title>. <source>Neuroscience</source> <volume>16</volume>, <fpage>511</fpage>&#x2013;<lpage>520</lpage>. doi: <pub-id pub-id-type="doi">10.1016/0306-4522(85)90189-7</pub-id>, PMID: <pub-id pub-id-type="pmid">4094689</pub-id></citation></ref>
<ref id="ref37"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Miall</surname> <given-names>R. C.</given-names></name> <name><surname>Wolpert</surname> <given-names>D. M.</given-names></name></person-group> (<year>1996</year>). <article-title>Forward models for physiological motor control</article-title>. <source>Neural Netw.</source> <volume>9</volume>, <fpage>1265</fpage>&#x2013;<lpage>1279</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S0893-6080(96)00035-4</pub-id>, PMID: <pub-id pub-id-type="pmid">12662535</pub-id></citation></ref>
<ref id="ref38"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Namboodiri</surname> <given-names>V. M. K.</given-names></name> <name><surname>Mihalas</surname> <given-names>S.</given-names></name> <name><surname>Hussain Shuler</surname> <given-names>M. G.</given-names></name></person-group> (<year>2014</year>). <article-title>A temporal basis for Weber&#x2019;s law in value perception</article-title>. <source>Front. Integr. Neurosci.</source> <volume>8</volume>:<fpage>79</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnint.2014.00079</pub-id>, PMID: <pub-id pub-id-type="pmid">25352791</pub-id></citation></ref>
<ref id="ref39"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nesti</surname> <given-names>A.</given-names></name> <name><surname>Beykirch</surname> <given-names>K. A.</given-names></name> <name><surname>Pretto</surname> <given-names>P.</given-names></name> <name><surname>B&#x00FC;lthoff</surname> <given-names>H. H.</given-names></name></person-group> (<year>2015</year>). <article-title>Human discrimination of head-centred visual&#x2013;inertial yaw rotations</article-title>. <source>Exp. Brain Res.</source> <volume>233</volume>, <fpage>3553</fpage>&#x2013;<lpage>3564</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00221-015-4426-2</pub-id>, PMID: <pub-id pub-id-type="pmid">26319547</pub-id></citation></ref>
<ref id="ref40"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nieder</surname> <given-names>A.</given-names></name> <name><surname>Miller</surname> <given-names>E. K.</given-names></name></person-group> (<year>2003</year>). <article-title>Coding of cognitive magnitude: compressed scaling of numerical information in the primate prefrontal cortex</article-title>. <source>Neuron</source> <volume>37</volume>, <fpage>149</fpage>&#x2013;<lpage>157</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S0896-6273(02)01144-3</pub-id>, PMID: <pub-id pub-id-type="pmid">12526780</pub-id></citation></ref>
<ref id="ref41"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nouri</surname> <given-names>S.</given-names></name> <name><surname>Karmali</surname> <given-names>F.</given-names></name></person-group> (<year>2018</year>). <article-title>Variability in the Vestibulo-ocular reflex and vestibular perception</article-title>. <source>Neuroscience</source> <volume>393</volume>, <fpage>350</fpage>&#x2013;<lpage>365</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroscience.2018.08.025</pub-id>, PMID: <pub-id pub-id-type="pmid">30189227</pub-id></citation></ref>
<ref id="ref42"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Parvin</surname> <given-names>D. E.</given-names></name> <name><surname>Tsay</surname> <given-names>J.</given-names></name> <name><surname>Dang</surname> <given-names>K. V.</given-names></name> <name><surname>Stover</surname> <given-names>A. R.</given-names></name> <name><surname>Ivry</surname> <given-names>R. B.</given-names></name> <name><surname>Morehead</surname> <given-names>J. R.</given-names></name></person-group> (<year>2024</year>). <article-title>Implicit adaptation is modulated by the relevance of feedback</article-title>. <source>bioRxiv</source>:<fpage>2022.01.19.476924</fpage>. doi: <pub-id pub-id-type="doi">10.1101/2022.01.19.476924</pub-id></citation></ref>
<ref id="ref43"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Peters</surname> <given-names>B. S.</given-names></name> <name><surname>Armijo</surname> <given-names>P. R.</given-names></name> <name><surname>Krause</surname> <given-names>C.</given-names></name> <name><surname>Choudhury</surname> <given-names>S. A.</given-names></name> <name><surname>Oleynikov</surname> <given-names>D.</given-names></name></person-group> (<year>2018</year>). <article-title>Review of emerging surgical robotic technology</article-title>. <source>Surg. Endosc.</source> <volume>32</volume>, <fpage>1636</fpage>&#x2013;<lpage>1655</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00464-018-6079-2</pub-id>, PMID: <pub-id pub-id-type="pmid">29442240</pub-id></citation></ref>
<ref id="ref44"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Planthaber</surname> <given-names>S.</given-names></name> <name><surname>Mallwitz</surname> <given-names>M.</given-names></name> <name><surname>Kirchner</surname> <given-names>E. A.</given-names></name></person-group> (<year>2018</year>). <article-title>Immersive robot control in virtual reality to command robots in space missions</article-title>. <source>J. Softw. Eng. Appl.</source> <volume>11</volume>, <fpage>341</fpage>&#x2013;<lpage>347</lpage>. doi: <pub-id pub-id-type="doi">10.4236/jsea.2018.117021</pub-id></citation></ref>
<ref id="ref45"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Quirmbach</surname> <given-names>F.</given-names></name> <name><surname>Limanowski</surname> <given-names>J.</given-names></name></person-group> (<year>2024</year>). <article-title>Visuomotor prediction during action planning in the human frontoparietal cortex and cerebellum</article-title>. <source>Cereb. Cortex</source> <volume>34</volume>. doi: <pub-id pub-id-type="doi">10.1093/cercor/bhae382</pub-id>, PMID: <pub-id pub-id-type="pmid">39325000</pub-id></citation></ref>
<ref id="ref46"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rohde</surname> <given-names>M.</given-names></name> <name><surname>Ernst</surname> <given-names>M. O.</given-names></name></person-group> (<year>2016</year>). <article-title>Time, agency, and sensory feedback delays during action</article-title>. <source>Curr. Opin. Behav. Sci.</source> <volume>8</volume>, <fpage>193</fpage>&#x2013;<lpage>199</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cobeha.2016.02.029</pub-id></citation></ref>
<ref id="ref47"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Salomon</surname> <given-names>R.</given-names></name> <name><surname>Lim</surname> <given-names>M.</given-names></name> <name><surname>Kannape</surname> <given-names>O.</given-names></name> <name><surname>Llobera</surname> <given-names>J.</given-names></name> <name><surname>Blanke</surname> <given-names>O.</given-names></name></person-group> (<year>2013</year>). <article-title>&#x201C;Self pop-out&#x201D;: agency enhances self-recognition in visual search</article-title>. <source>Exp. Brain Res.</source> <volume>228</volume>, <fpage>173</fpage>&#x2013;<lpage>181</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00221-013-3549-6</pub-id>, PMID: <pub-id pub-id-type="pmid">23665753</pub-id></citation></ref>
<ref id="ref48"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schulze</surname> <given-names>A.</given-names></name> <name><surname>Bodenstedt</surname> <given-names>S.</given-names></name> <name><surname>Distler</surname> <given-names>M.</given-names></name> <name><surname>Weitz</surname> <given-names>J.</given-names></name> <name><surname>Speidel</surname> <given-names>S.</given-names></name> <name><surname>Wagner</surname> <given-names>M.</given-names></name></person-group> (<year>2024</year>). <article-title>K&#x00FC;nstliche Intelligenz in der onkologischen Chirurgie</article-title>. <source>Die Onkol.</source> <volume>30</volume>, <fpage>372</fpage>&#x2013;<lpage>379</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00761-024-01486-2</pub-id></citation></ref>
<ref id="ref49"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shadmehr</surname> <given-names>R.</given-names></name> <name><surname>Smith</surname> <given-names>M. A.</given-names></name> <name><surname>Krakauer</surname> <given-names>J. W.</given-names></name></person-group> (<year>2010</year>). <article-title>Error correction, sensory prediction, and adaptation in motor control</article-title>. <source>Annu. Rev. Neurosci.</source> <volume>33</volume>, <fpage>89</fpage>&#x2013;<lpage>108</lpage>. doi: <pub-id pub-id-type="doi">10.1146/annurev-neuro-060909-153135</pub-id>, PMID: <pub-id pub-id-type="pmid">20367317</pub-id></citation></ref>
<ref id="ref50"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Takahashi</surname> <given-names>T.</given-names></name></person-group> (<year>2005</year>). <article-title>Loss of self-control in intertemporal choice may be attributable to logarithmic time-perception</article-title>. <source>Med. Hypotheses</source> <volume>65</volume>, <fpage>691</fpage>&#x2013;<lpage>693</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.mehy.2005.04.040</pub-id>, PMID: <pub-id pub-id-type="pmid">15990243</pub-id></citation></ref>
<ref id="ref51"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Takahashi</surname> <given-names>T.</given-names></name></person-group> (<year>2006</year>). <article-title>Time-estimation error following Weber-Fechner law may explain subadditive time-discounting</article-title>. <source>Med. Hypotheses</source> <volume>67</volume>, <fpage>1372</fpage>&#x2013;<lpage>1374</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.mehy.2006.05.056</pub-id>, PMID: <pub-id pub-id-type="pmid">16872753</pub-id></citation></ref>
<ref id="ref52"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Todorov</surname> <given-names>E.</given-names></name> <name><surname>Jordan</surname> <given-names>M. I.</given-names></name></person-group> (<year>2002</year>). <article-title>Optimal feedback control as a theory of motor coordination</article-title>. <source>Nat. Neurosci.</source> <volume>5</volume>, <fpage>1226</fpage>&#x2013;<lpage>1235</lpage>. doi: <pub-id pub-id-type="doi">10.1038/nn963</pub-id>, PMID: <pub-id pub-id-type="pmid">12404008</pub-id></citation></ref>
<ref id="ref53"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tsakiris</surname> <given-names>M.</given-names></name> <name><surname>Prabhu</surname> <given-names>G.</given-names></name> <name><surname>Haggard</surname> <given-names>P.</given-names></name></person-group> (<year>2006</year>). <article-title>Having a body versus moving your body: how agency structures body-ownership</article-title>. <source>Conscious. Cogn.</source> <volume>15</volume>, <fpage>423</fpage>&#x2013;<lpage>432</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.concog.2005.09.004</pub-id>, PMID: <pub-id pub-id-type="pmid">16343947</pub-id></citation></ref>
<ref id="ref54"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wen</surname> <given-names>W.</given-names></name> <name><surname>Imamizu</surname> <given-names>H.</given-names></name></person-group> (<year>2022</year>). <article-title>The sense of agency in perception, behaviour and human&#x2013;machine interactions</article-title>. <source>Nat. Rev. Psychol.</source> <volume>1</volume>, <fpage>211</fpage>&#x2013;<lpage>222</lpage>. doi: <pub-id pub-id-type="doi">10.1038/s44159-022-00030-6</pub-id></citation></ref>
<ref id="ref55"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wolpert</surname> <given-names>D. M.</given-names></name> <name><surname>Kawato</surname> <given-names>M.</given-names></name></person-group> (<year>1998</year>). <article-title>Multiple paired forward and inverse models for motor control</article-title>. <source>Neural Netw.</source> <volume>11</volume>, <fpage>1317</fpage>&#x2013;<lpage>1329</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S0893-6080(98)00066-5</pub-id>, PMID: <pub-id pub-id-type="pmid">12662752</pub-id></citation></ref>
<ref id="ref56"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yon</surname> <given-names>D.</given-names></name> <name><surname>Gilbert</surname> <given-names>S. J.</given-names></name> <name><surname>de Lange</surname> <given-names>F. P.</given-names></name> <name><surname>Press</surname> <given-names>C.</given-names></name></person-group> (<year>2018</year>). <article-title>Action sharpens sensory representations of expected outcomes</article-title>. <source>Nat. Commun.</source> <volume>9</volume>:<fpage>4288</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41467-018-06752-7</pub-id>, PMID: <pub-id pub-id-type="pmid">30327503</pub-id></citation></ref>
</ref-list>
</back>
</article>