<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xml:lang="EN" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Syst. Neurosci.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Systems Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Syst. Neurosci.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1662-5137</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnsys.2026.1778604</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Hypothesis and Theory</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Functional sufficiency in VR: achieving non-corporeal embodiment</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name><surname>Wright</surname> <given-names>Malcolm</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/3334345/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Petit</surname> <given-names>Olivia</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1148066/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Schnack</surname> <given-names>Alexander</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>Massey Business School, Massey University</institution>, <city>Auckland</city>, <country country="NZ">New Zealand</country></aff>
<aff id="aff2"><label>2</label><institution>Ehrenberg Bass Institute for Marketing Science, Adelaide University</institution>, <city>Adelaide, SA</city>, <country country="AU">Australia</country></aff>
<aff id="aff3"><label>3</label><institution>Kedge Business School</institution>, <city>Marseille</city>, <country country="FR">France</country></aff>
<author-notes>
<corresp id="c001"><label>&#x002A;</label>Correspondence: Malcolm Wright, <email xlink:href="mailto:m.j.wright@massey.ac.nz">m.j.wright@massey.ac.nz</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-27">
<day>27</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2026</year>
</pub-date>
<volume>20</volume>
<elocation-id>1778604</elocation-id>
<history>
<date date-type="received">
<day>31</day>
<month>12</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>05</day>
<month>02</month>
<year>2026</year>
</date>
<date date-type="accepted">
<day>06</day>
<month>02</month>
<year>2026</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2026 Wright, Petit and Schnack.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Wright, Petit and Schnack</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-27">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>This article provides a novel functionalist account of embodiment in immersive virtual environments, grounded in a formal model of cognition, supported by past empirical evidence, and offering a testable framework for predicting when virtual experiences will produce cognitive and emotional effects. Our approach complements existing work on telepresence and subjective experience by applying the Thin Model as an intermediate theory linking interface affordances to perception, emotion, and behavior. Drawing on previously published immersive virtual reality studies, we show that when key functional elements - such as sensing, recognition, inspection, and feedback - are preserved, behavioral and emotional outcomes remain stable even when locomotion mechanisms differ. These findings support a criterion of functional sufficiency for embodiment where interface substitution leaves core policies of action unchanged. We outline a set of theory-driven tests to identify the limits of this invariance and argue that embodiment should be defined by the integrity of the perception&#x2013;action loop, not by anatomical mimicry.</p>
</abstract>
<kwd-group>
<kwd>ecological validity</kwd>
<kwd>embodiment</kwd>
<kwd>functional sufficiency</kwd>
<kwd>immersive virtual reality</kwd>
<kwd>mental simulation</kwd>
<kwd>multisensory integration</kwd>
<kwd>perception-action loop</kwd>
<kwd>telepresence</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. MW was supported by the MSA Charitable Trust. OP was supported by the French National Research Agency (ANR) under the METAVETRE project, Award Number ANR-23-CE26-0015.</funding-statement>
</funding-group>
<counts>
<fig-count count="1"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="59"/>
<page-count count="9"/>
<word-count count="6956"/>
</counts>
</article-meta>
</front>
<body>
<sec id="S1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Although virtual environments may be seen by some as mere simulations, they provide domains in which real perception and action may still occur. <xref ref-type="bibr" rid="B12">Chalmers (2022)</xref> argues that virtual experiences and objects possess the same ontological and epistemic status as their physical counterparts: what matters is causal engagement, not the substrate generating perceptions. <xref ref-type="bibr" rid="B41">Petit et al. (2022)</xref> extend this perspective, emphasizing that conscious experience depends on the brain&#x2019;s predictive modeling of incoming sensory inputs. Even when inputs are virtual, people can be expected to generate anticipatory models that allow them to perceive, feel, and evaluate objects in a manner continuous with real-world experience (<xref ref-type="bibr" rid="B52">Tal et al., 2026</xref>). Mental simulation, grounded in prior sensory and motor encounters, enables users to &#x201C;fill in the blanks&#x201D;, imagining how an object might look, feel, or sound, while visual technologies can further enhance the vividness and personal relevance of these predictions (<xref ref-type="bibr" rid="B42">Petit et al., 2019</xref>). In this sense, virtual environments instantiate the functional processes of perception, evaluation, and reflection, expanding what it means to consciously experience.</p>
<p>Together, these views license treating virtual encounters as real enough to matter - not as illusions to be corrected, but as legitimate contexts in which consciousness, decision, and ethics unfold. This position aligns with Petit&#x2019;s work on consumer digital consciousness, which explores how embodied perception and valuation operate across physical and virtual boundaries (<xref ref-type="bibr" rid="B40">Petit and Velasco, 2026</xref>). It also raises new questions for the conceptual analysis of embodiment. If the virtual is the real, how does it make sense to talk about embodiment?</p>
<p>Embodiment in VR encompasses three interrelated components: agency (the feeling of causing one&#x2019;s actions), body ownership (the feeling that the virtual body is the source of sensations), and self-location (the spatial experience of being inside a body) (<xref ref-type="bibr" rid="B27">Guy et al., 2023</xref>). Embodiment is often tested by asking whether participants can move as they do in the physical world; whether they can walk, bend, or turn naturally. Yet this emphasis on physical mimicry mistakes motor replication for the cognitive activity and decision policies that guide behavior in the world outside the headset, despite evidence that the sense of embodiment is unaffected by alternative physical implementations (<xref ref-type="bibr" rid="B18">Dewez et al., 2020</xref>).</p>
<p>Our central claim is that embodiment depends on the preservation of a functional loop, rather than on anatomical fidelity, defined as the degree to which a virtual interface replicates the biomechanical and sensorimotor properties of the physical body. When a virtual interface preserves the structure of perception, recognition, and feedback, we argue people will act as they normally would, regardless of how their locomotion or other individual affordances are implemented. This would enable functional embodiment that allows VR environments to be more realistic simulacra of, or in some circumstances replacements for, regular interactions in the physical world. Embodiment, therefore, should be assessed at the level of policy - understood here in the reinforcement-learning sense: a learned mapping from perceived states to action selection - rather than at the level of mechanical resemblance. We develop this claim by drawing together formal theory, empirical evidence from immersive VR studies, and a set of testable propositions designed to identify where this sufficiency breaks down.</p>
</sec>
<sec id="S2">
<label>2</label>
<title>The Thin Model as functionalist intermediate theory</title>
<p>To develop our functionalist account of embodiment we draw on the Thin Model, an intermediate theory of the mind that provides a formal account of mental function grounded in four core axioms: materialism, functionalism, reductionism, and recursion (<xref ref-type="bibr" rid="B57">Wright et al., 2023</xref>). It defines an autonomous cognitive system through eight elements: a viable form, guiding tendencies or goals, sensory inputs, a concept store, recognition processes, action tendencies, feedback loops, and, optionally, a sense of self. This structure is designed to be substrate- and species-neutral, enabling analysis of cognition without requiring a human biological frame. As such, it provides a functional scaffold for understanding how technologically mediated environments can support perception, emotion, and decision-making, even when the underlying mechanisms differ from those found in the physical world.</p>
<p>The Thin Model provides a basis for identifying which cognitive functions must be supported for embodiment to occur. In immersive virtual environments, we can map its core functional elements directly onto the interface: sensory inputs correspond to rendered sights, sounds and/or haptic feedbacks; recognition requires stable and discriminative object identity and label clarity; action tendencies emerge through affordances for reaching, grasping, rotating, and inspecting; and feedback is provided through immediate outcomes and cumulative reinforcement. These functions enable the control loop at the heart of situated behavior: perceive &#x2192; recognize &#x2192; inspect &#x2192; decide &#x2192; update. When this perception-action loop is intact, users can enact stable patterns of attention and action regardless of how their movement through space or other affordances are implemented.</p>
<p>Taken together, these mappings suggest embodiment depends on sufficient functionality across the major elements of an autonomous entity, not on full-body mimicry. When the perceptual and behavioral elements of the loop are supported, users can sustain coherent policies of attention, evaluation, and action even as, for example, the physical means of locomotion change. The prediction follows directly: outcomes will remain invariant under interface substitution so long as the functional circuit linking perception, recognition, inspection, decision, and feedback remains intact. Where that circuit is disrupted - by degrading inspection, recognition, or feedback - embodied experience, affect, and behavior should diverge in measurable ways.</p>
<p>The Thin Model can be aligned with predictive processing and active inference frameworks, which have become influential in understanding perception, action, and embodiment in the brain (<xref ref-type="bibr" rid="B52">Tal et al., 2026</xref>). Predictive processing posits that the brain continuously generates hierarchical predictions about sensory inputs and minimizes prediction errors through perception and action (<xref ref-type="bibr" rid="B14">Clark, 2013</xref>; <xref ref-type="bibr" rid="B23">Friston et al., 2017</xref>). Embodied interactions in VR can therefore be understood as the brain&#x2019;s engagement in minimizing discrepancies between expected and actual sensory states, even when those states are generated by artificial stimuli. Within this view, functional embodiment depends not on physical mimicry but on the preservation of sensorimotor contingencies that enable effective error minimization and confidence in action outcomes. This approach is present within the Thin Model in which sensory inputs are recognized through comparison with a concept store, generating feedback and action tendencies; however, the Thin Model is more general and accommodates a broader range of mechanisms than Active Inference alone.</p>
<p>Neural evidence suggests that prediction error signaling engages regions such as the anterior cingulate cortex (ACC), which tracks the mismatch between expected and observed events and supports cognitive control and learning (prediction error signals in ACC, <xref ref-type="bibr" rid="B1">Alexander and Brown, 2019</xref>). Oscillatory dynamics (e.g., beta and theta bands) and functional connectivity changes within sensorimotor and parietal networks have also been linked to agency and action&#x2013;outcome coupling, revealing how neural communication patterns shift when prediction comparisons succeed or fail (<xref ref-type="bibr" rid="B11">Buchholz et al., 2019</xref>). These neural signatures can reveal disruptions in the perception&#x2013;action loop, for example, elevated prediction-error-related activity or atypical oscillatory patterns, even when overt behavior remains stable. Such markers provide a testable basis for neural implementation of minimal embodiment in future fMRI studies, indicating whether the cognitive inference processes that sustain perception, agency, and decision-making are intact.</p>
</sec>
<sec id="S3">
<label>3</label>
<title>Minimal embodiment, defined</title>
<p>Having introduced the Thin Model and demonstrated its conceptual fit, we now propose a more precise definition of minimal embodiment. Our aim is to clarify what must be preserved at the interface level for virtual experience to support real behavior and emotion.</p>
<p>Minimal embodiment can be defined as the preservation of the task-relevant perception&#x2013;action loop described in a general sense by the Thin Model - sensing, recognition, action tendencies, feedback, and optional self-monitoring - instantiated for situated behavior. When these functions are supported by the possession and exercise of sensorimotor skill (<xref ref-type="bibr" rid="B38">No&#x00EB;, 2004</xref>) and appropriate affordances (<xref ref-type="bibr" rid="B26">Gross et al., 2005</xref>), an agent can perceive, evaluate, and act coherently within a virtual environment. Embodiment, on this account, is not a property of bodily duplication but of maintaining a closed functional circuit that allows adaptive behavior to unfold.</p>
<p>A closed functional circuit also contributes to other aspects of a meaningful virtual experience, such as minimal groundedness, the sense of connection to the environment, to others, and to temporal context. Minimal groundedness supports emotional stability, cognitive coherence, and engagement (<xref ref-type="bibr" rid="B20">Eichinger et al., 2022</xref>). When perception, inspection, feedback, and symbolic cues are preserved, users can feel anchored in familiar or socially meaningful contexts, allowing virtual interactions to engage both cognitive and affective dimensions (<xref ref-type="bibr" rid="B58">Xie and Desouza, 2025</xref>).</p>
<p>This framing shifts the focus of VR experiences from physical mechanics to functional and environmental sufficiency. Whether movement is enacted through walking, teleportation, or controller input is secondary to whether the interface affords stable perception, effective recognition, responsive feedback, and cues that connect users to meaningful places, people, and temporal anchors. If these affordances are intact, outcomes such as attention patterns, choice distributions, and affective engagement should remain within the same range that can be observed in physical contexts. Functional sufficiency, rather than sensorimotor completeness, becomes the relevant criterion.</p>
</sec>
<sec id="S4">
<label>4</label>
<title>Empirical anchors</title>
<p>Seen in this light, earlier empirical findings can be read as tests of function rather than of anatomy. The observed invariance of behavior and emotion across different locomotion modes in VR, for example, reflects preservation of the core perception-action loop (<xref ref-type="fig" rid="F1">Figure 1</xref>). Variations in path or movement style are surface differences; what matters is that the cognitive system continues to operate within a coherent feedback structure. This realignment shifts the focus of embodiment research from reproducing form to identifying which functional elements sustain meaningful experience. We now briefly review illustrative literature, considering the impact of VR locomotion on embodiment, the use of immersive VR to enhance realism of shopper behavior, and the role of mental simulation in VR. The empirical examples presented here draw on consumer behavior research, reflecting the origins of our data. However, the functional framework applies wherever immersive systems support perception, recognition, and decision-making - including training, simulation, and entertainment contexts.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption><p>The perception-action loop in immersive virtual environments. The loop instantiates the Thin Model&#x2019;s functional architecture for situated behavior in VR. Functional sufficiency is achieved when VR interface elements adequately support each stage, regardless of how specific affordances (e.g., locomotion) are implemented. Feedback from the update stage can modulate all other stages of processing.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnsys-20-1778604-g001.tif">
<alt-text content-type="machine-generated">Flowchart showing a cycle with five labeled steps: Perceive, Recognize, Inspect, Decide, and Update. Each box contains brief supporting details, and arrows indicate the process flows from Perceive to Recognize, Inspect, Decide, Update, and back to Perceive.</alt-text>
</graphic>
</fig>
<sec id="S4.SS1">
<label>4.1</label>
<title>Embodiment</title>
<p>Several studies have investigated how changes in locomotion method impact perceived embodiment in virtual environments. Embodiment in VR is commonly understood as the extent to which users experience the virtual body as their own, encompassing feelings of body ownership, agency, and self-location. Locomotion techniques are often assumed to play a critical role in this process because they directly link bodily movement to spatial displacement and sensorimotor feedback. Empirical findings, however, suggest that the impact of locomotion on embodiment depends strongly on the perspective adopted in the virtual experience. For instance, <xref ref-type="bibr" rid="B54">Ulrichs et al. (2024)</xref> investigated three locomotion methods in a third-person (3PP) virtual reality simulation and found that locomotion through natural movement (arm-swing) outperformed the two alternative methods, head-tilt and joystick. In contrast, studies testing changes to perceived embodiment in a first-person (1PP) virtual simulation using head-mounted displays have consistently shown that there was no difference in embodiment between locomotion methods. These studies tested joystick versus push-pull (<xref ref-type="bibr" rid="B22">Freiwald et al., 2022</xref>), real walking versus walking-in-place, and virtual steering (<xref ref-type="bibr" rid="B18">Dewez et al., 2020</xref>), and assisted cycling locomotion versus active cycling locomotion (<xref ref-type="bibr" rid="B37">Moullec et al., 2023</xref>). While more studies are needed to investigate other more popular locomotion methods, such as instant teleportation, these early findings consistently demonstrated that locomotion is not a primary driver of embodiment in first-person virtual experiences, whereas it plays a more salient role in third-person contexts where the user&#x2019;s relationship to the avatar is externally mediated.</p>
</sec>
<sec id="S4.SS2">
<label>4.2</label>
<title>Behavior</title>
<p>Since studies have collectively failed to show that physical mimicry (locomotion) impacts embodiment, corresponding changes in user behavior are unlikely to be expected. In early comparisons between desktop and immersive VR, consistent increases in presence proxies and perceived naturalness were observed when participants engaged with the environment through a head-mounted display and tracked controllers (<xref ref-type="bibr" rid="B45">Schnack et al., 2019</xref>). These findings suggest that immersive VR supports the necessary perceptual and interactive conditions to enable the full perception-action loop. When this loop is intact, participants behave in line with well-established shopper patterns (<xref ref-type="bibr" rid="B46">Schnack et al., 2020</xref>); purchase rates increase with shelf height, private-label products receive a plausible share of the basket, and unplanned purchases occur at realistic frequencies. These effects mirror physical-world observations and suggest that participants are not merely playing through a simulation but enacting familiar policies of attention, evaluation, and choice.</p>
<p>In a controlled comparison between motion-tracked walking and instant teleportation within immersive VR, participants followed different paths through the environment but produced statistically equivalent outcomes (<xref ref-type="bibr" rid="B47">Schnack et al., 2021a</xref>). Heatmaps revealed predictable differences in movement - such as central aisle bias among teleporting users - but these variations did not affect key behavioral or emotional measures. Basket size, trip duration, total spend, unplanned purchases, and uptake of unfamiliar brands all remained stable across locomotion types, and a subsequent study found no moderation by shopper personality (<xref ref-type="bibr" rid="B44">Schnack et al., 2021b</xref>). EEG traces of engagement, excitement, and stress also showed no significant differences. These findings suggest that when the functional loop is preserved - particularly recognition, inspection, and decision-feedback - changes to gross locomotor input do not alter underlying policies of action. Other researchers also found that locomotion had no impact on telepresence (<xref ref-type="bibr" rid="B51">Soler-Dom&#x00ED;nguez et al., 2020</xref>).</p>
</sec>
<sec id="S4.SS3">
<label>4.3</label>
<title>Mental simulation</title>
<p>The functional loop implied by the Thin Model has cognitive as well as behavioral elements (<xref ref-type="bibr" rid="B57">Wright et al., 2023</xref>). Some of these have been studied in VR, including mental simulation, the cognitive mechanism through which individuals internally recreate sensory and motor experiences associated with objects or situations, enabling better environmental understanding and facilitating decision-making (<xref ref-type="bibr" rid="B4">Barsalou, 2008</xref>, <xref ref-type="bibr" rid="B5">2020</xref>). VR enhances mental simulation, particularly through first-person perspectives, which amplify its effects by aligning virtual perception with the user&#x2019;s body schema. For instance, <xref ref-type="bibr" rid="B6">Basso et al. (2018)</xref> showed that first-person perspectives increase neural activity in regions associated with rewards (amygdala) and sensory-motor processing (superior parietal gyrus) during observation of hand actions, whereas third-person perspectives elicit weaker engagement. Immersive VR naturally leverages this mechanism: first-person visualization activates neural patterns associated with relevant sensory-motor experiences, allowing users to simulate outcomes and evaluate interactions even without direct physical input (<xref ref-type="bibr" rid="B42">Petit et al., 2019</xref>). These simulations are not mere imagination; they functionally replicate perception&#x2013;action loops active during real-world interaction, guiding subsequent behavior and emotion.</p>
<p>Recent work highlights how Sensory-Enabling Technologies (SETs) can further amplify mental simulation in digital environments. By integrating multisensory cues, such as haptic feedback, temperature, wind, or olfactory signals, VR systems can more fully activate the sensory-motor patterns associated with physical interactions (<xref ref-type="bibr" rid="B42">Petit et al., 2019</xref>). For example, adding touch-enabled controls in VR increases engagement through haptic mental imagery, consequently influencing product evaluations (<xref ref-type="bibr" rid="B16">Cowan et al., 2021</xref>), while scent cues strengthen anticipatory emotions and states of flow, strengthening brand perceptions (<xref ref-type="bibr" rid="B21">Flavi&#x00E1;n et al., 2021</xref>; <xref ref-type="bibr" rid="B15">Cowan et al., 2023</xref>).</p>
<p>The fidelity of these simulations depends on multisensory congruence. Just as synchronous visual and tactile stimulation induces self-attribution in the rubber hand illusion (<xref ref-type="bibr" rid="B8">Botvinick and Cohen, 1998</xref>; <xref ref-type="bibr" rid="B19">Ehrsson et al., 2004</xref>), VR environments can manipulate body ownership, self-location, and interoception through synchronized cues, producing body transfer illusions where users perceive avatars as extensions of their own bodies. Self-relevant avatars are particularly effective in strengthening these functional loops (<xref ref-type="bibr" rid="B40">Petit and Velasco, 2026</xref>). <xref ref-type="bibr" rid="B49">Seo et al. (2017)</xref> found that participants exposed to avatars bearing their own faces experienced higher self-presence, both subjectively (self-report) and physiologically (EEG). Users paid more attention to self-relevant faces, reported greater identification, and exhibited neural markers of enhanced cognitive engagement. This illustrates that functional relevance to the user, not anatomical realism, is again the critical factor in sustaining the perception&#x2013;action loop.</p>
</sec>
</sec>
<sec id="S5">
<label>5</label>
<title>Implications for design and validity</title>
<p>This functional framing reshapes design priorities. If locomotion realism is not essential, but inspection fidelity is, then resources should flow accordingly. High-resolution labels, stable object identity, intuitive hand presence, and responsive object manipulation contribute directly to the perception&#x2013;action loop (see, for instance; <xref ref-type="bibr" rid="B29">Jang et al., 2024</xref>; <xref ref-type="bibr" rid="B35">Luangrath et al., 2022</xref>; <xref ref-type="bibr" rid="B59">Xionghui et al., 2024</xref>). Avatars might be optimized to strengthen embodiment through behavioral expressiveness, or self-relevance, enhancing attention, engagement, and functional interaction, although avatar realism alone is insufficient to affect perceptions of virtual body ownership (<xref ref-type="bibr" rid="B36">Mal et al., 2024</xref>).</p>
<p>Designers can further leverage SETs to reinforce these cognitive loops. Multisensory enhancements, such as haptic gestures, tactile surfaces, olfactory cues, or wind effects, can enrich inspection and interaction, making virtual products feel more tangible and improving anticipatory evaluation (<xref ref-type="bibr" rid="B2">Amini Gougeh and Falk, 2023</xref>; <xref ref-type="bibr" rid="B13">Chen and Tsai, 2025</xref>). These interventions are particularly effective when visual cues alone are insufficient to support accurate mental simulation or when virtual experiences must align with real-world expectations (<xref ref-type="bibr" rid="B17">de Vries et al., 2018</xref>; <xref ref-type="bibr" rid="B15">Cowan et al., 2023</xref>).</p>
<p>The invariance of behavioral and emotional outcomes across different locomotion methods suggests that full-body motion fidelity is not always required for valid immersive experiences. Where the perception&#x2013;action loop is preserved, teleportation can serve as a practical and effective means of navigation. Locomotion platforms may improve realism or comfort for specific users or tasks, but they are not necessary to support the functional criteria for embodiment (<xref ref-type="bibr" rid="B18">Dewez et al., 2020</xref>). Simulating locomotion by walking with full-body tracking offers diminishing returns when these proximal affordances are already in place.</p>
<p>The functional stability observed in immersive VR supports its use in behavioral research, market simulations, and applied training contexts. When the system preserves the relevant control loop, users enact decision policies consistent with physical-world patterns. This strengthens the case for immersive VR as a valid platform for studying situated cognition, provided the limits of sufficiency are clearly understood and empirically tested.</p>
</sec>
<sec id="S6">
<label>6</label>
<title>Boundary conditions (functional breakdown)</title>
<p>As environments grow in scale or complexity, locomotion constraints can begin to shape what participants see and attend to (<xref ref-type="bibr" rid="B24">Gabay and Schonberg, 2025</xref>). In large virtual stores or settings with high product novelty, teleportation may reduce shelf exposure and truncate exploration paths. This can shift decision outcomes by limiting the information available for recognition and evaluation. In such cases, the perception&#x2013;action loop may remain partially intact but insufficiently exercised, leading to measurable deviations in unplanned purchases or brand switching. Functional sufficiency, here, becomes a question of reach &#x2013; not in a motor sense, but in terms of what the system enables users to encounter. One solution &#x2013; or comparative test condition &#x2013; could be to fix targets on the map and allow the user to teleport only to adjacent targets (<xref ref-type="bibr" rid="B22">Freiwald et al., 2022</xref>).</p>
<p>Embodied decision-making also depends on effective object recognition, which is impacted by factors such as the level of control over a virtual object (<xref ref-type="bibr" rid="B55">Wang and Datta, 2010</xref>). This control affords the user the ability to closely inspect an object and retrieve object-specific information relevant for decision-making. However, when mechanical inspection features such as rotation and zoom are degraded, interactivity is reduced and the object recognition component of the loop is impaired. Several studies have shown that this degradation in interactivity in turn negatively impacts user behavior (<xref ref-type="bibr" rid="B30">Kang et al., 2020</xref>; <xref ref-type="bibr" rid="B32">Kim et al., 2021</xref>).</p>
<p>The same issue can occur when the visual representation of virtual products is suboptimal. For instance, degraded label clarity can lead to a reduction in vividness and readability, which in turn impairs the user&#x2019;s ability to recognize objects. While <xref ref-type="bibr" rid="B30">Kang et al. (2020)</xref> found no effect of graphics quality on informativeness or decision-making, other researchers did find a relationship between graphical quality, perceived vividness, spatial presentation (2D vs. 3D) and decision-making/user behavior (<xref ref-type="bibr" rid="B32">Kim et al., 2021</xref>).</p>
<p>Object recognition further depends on the context of the environmental cues (<xref ref-type="bibr" rid="B39">Oliva and Torralba, 2007</xref>), requiring thoughtful planning around the presence of surrounding objects. In immersive store contexts, impaired object recognition is likely to affect the uptake of unfamiliar brands, reduce user confidence, and lower presence ratings (<xref ref-type="bibr" rid="B32">Kim et al., 2021</xref>). These effects are not artifacts of interface quality <italic>per se</italic>, but direct consequences of interrupting the cognitive processes that support value inference and choice. Inspection is a bottleneck for cognition; when lost, the system no longer supports the responses it is meant to elicit.</p>
<p>Importantly, the type of product category shapes which inspection cues are most relevant (<xref ref-type="bibr" rid="B7">Bigne et al., 2024</xref>). For functional objects, such as packaged products, precise manipulation and detailed views, such as zooming or rotating, are critical to simulate assessment of suitability and perceived risk. In contrast, for experiential products like served foods, the broader context of the environment and first-person visualization play a more significant role than detailed object interaction. In these cases, providing a familiar or contextually rich VR environment may enhance mental simulation more effectively than improving mechanical interaction alone. Immersive retail design should therefore calibrate product and environmental cues to product characteristics, ensuring that inspection and interaction support the specific product type requirements (<xref ref-type="bibr" rid="B41">Petit et al., 2022</xref>).</p>
<p>The framing of the task also matters. Participants given a goal-directed assignment may experience heightened stress or urgency compared to those in exploratory or recreational conditions, even when the environment is the same. In previous studies, engagement and excitement showed consistent early peaks (<xref ref-type="bibr" rid="B53">Tian et al., 2022</xref>), but stress trajectories varied with the framing of the interaction (<xref ref-type="bibr" rid="B3">Ban et al., 2024</xref>). This suggests that the feedback element of the loop is modulated not only by interface mechanics but by perceived intention and consequence. Here, embodiment can be expected to shift with task valence &#x2013; not because the loop breaks, but because it recalibrates.</p>
</sec>
<sec id="S7">
<label>7</label>
<title>Research program: theory-driven probes</title>
<p>To test the functionalist account of embodiment, we propose a focused set of experimental manipulations, each targeting a distinct component of the perception&#x2013;action loop. The aim is not to evaluate immersive technology in general, but to probe which interface elements are necessary for stable policies of attention, emotion, and behavior. Each test examines the boundary between functional sufficiency and breakdown, guided by the Thin Model&#x2019;s mapping of sensing, recognition, inspection, and feedback.</p>
<p><bold>Test 1: Degrade Inspection</bold></p>
<p>Reduce the clarity of product labels and disable object rotation. This targets recognition and inspection, and should directly affect uptake of unfamiliar brands, increase decision latency, and lower presence scores. If participants cannot resolve the relevant distinctions, policy formation should degrade.</p>
<p><bold>Test 2: Expand Scale</bold></p>
<p>Increase store size and assortment novelty. This tests whether locomotion mechanisms that limit spatial exposure (e.g., teleportation) compromise the availability of perceptual input. If exposure falls below a functional threshold, outcomes such as unplanned purchases and diversity of choice should diverge from behavioral benchmarks.</p>
<p><bold>Test 3: Manipulate Multisensory Inputs</bold></p>
<p>Compare conditions with minimal sensory input (visual-only) versus enriched multisensory environments using SETs. This can include ambient audio, haptic feedback, temperature changes, wind, and scent cues, while keeping visuals constant. The goal is to test whether enhanced sensory congruence strengthens mental simulation, presence, and engagement (e.g., anticipatory imagery of product use or consumption outcomes). We predict richer multisensory input will increase presence and immersive experience, supporting the perception&#x2013;action loop, but may not change core behavioral outcomes unless the additional cues influence perceived salience or product relevance. This manipulation also allows exploration of which sensory modalities are most critical for sustaining minimal embodiment and effective decision-making.</p>
<p><bold>Test 4: Alter Hand Fidelity</bold></p>
<p>Remove or degrade the ability to grasp, rotate, or reposition items. This affects the physical realization of action tendencies while leaving recognition cues visible. If object handling is critical to embodied evaluation, we expect a drop in product engagement and a shift in choice patterns.</p>
<p><bold>Test 5: Grounding Cues Manipulation</bold></p>
<p>Introduce or remove contextual cues that connect users to familiar places, social actors, or historical references. Measure effects on emotional engagement, perceived stability, and decision confidence. This allows examination of whether groundedness emerges from minimal embodiment, supporting functional engagement.</p>
<p><bold>Test 6: Avatar Customization</bold></p>
<p>Manipulate the degree of avatar personalization and resemblance to the user, from generic avatars to highly self-representative avatars. We predict self-resembling avatars will strengthen embodiment by enhancing perception&#x2013;action loops, increasing emotional engagement, and reinforcing stable behavioral policies. This test examines whether the alignment of virtual self-representation with the user is a functional component of minimal embodiment.</p>
<p>Together, these tests allow for potential falsification of the minimal embodiment hypothesis. Where outcomes shift despite preserved recognition and inspection, the hypothesis is undermined. Where outcomes remain stable despite surface-level interface changes, it gains support.</p>
</sec>
<sec id="S8">
<label>8</label>
<title>A compact measurement stack</title>
<p>In support of these tests, we suggest a specific measurement stack that explicitly distinguishes embodiment from telepresence. This distinction is grounded in the theoretical understanding of the perception&#x2013;action loop: before meaningful assessments of behavioral stability or functional embodiment can be made, the interface must support coherent sensing, recognition, and feedback. In other words, the loop must be operable for embodiment to manifest, making presence a necessary enabling condition rather than a direct indicator of embodied outcomes (<xref ref-type="bibr" rid="B50">Slater, 2009</xref>; <xref ref-type="bibr" rid="B31">Kilteni et al., 2012</xref>).</p>
<p>Established instruments such as the Igroup Presence Questionnaire (IPQ; <xref ref-type="bibr" rid="B48">Schubert et al., 2001</xref>) or the Presence Questionnaire (<xref ref-type="bibr" rid="B56">Witmer and Singer, 1998</xref>) can be used alongside usability measures (e.g., SUS; <xref ref-type="bibr" rid="B10">Brooke, 1996</xref>) to assess ease of movement, naturalness of interaction, and perceived immersion. These measures do not index embodiment <italic>per se</italic> but instead ensure that the participant&#x2019;s perceptual and motor environment is coherent enough for the perception&#x2013;action loop to operate. Only once these prerequisites are met can we interpret subsequent behavioral, physiological, and affective outcomes as reflecting the integrity of embodiment.</p>
<p>Embodiment itself is measured through convergent methods targeting ownership, agency, and self-location. Standardized questionnaires (VEQ; <xref ref-type="bibr" rid="B43">Roth and Latoschik, 2020</xref>) capture subjective experience, while behavioral tasks (response to threat, locomotion, mental imagery; <xref ref-type="bibr" rid="B9">Bourdin et al., 2017</xref>; <xref ref-type="bibr" rid="B34">Lenggenhager et al., 2007</xref>) provide objective evidence. Implicit agency measures such as intentional binding and sensory attenuation (<xref ref-type="bibr" rid="B25">Gentsch and Sch&#x00FC;tz-Bosbach, 2011</xref>) further validate control over actions.</p>
<p>Physiological measures provide complementary insight into the affective state of participants. EEG indicators of engagement, excitement, and stress offer a coarse but useful trace of affective state (<xref ref-type="bibr" rid="B33">Kober et al., 2012</xref>). Similarly, heart rate variability (HRV) and skin conductance (electrodermal activity) can provide additional coarse-grained evidence of arousal and stress responses, while eye-tracking data, including fixation duration and saccade patterns, can also offer insight into attention allocation and cognitive load (<xref ref-type="bibr" rid="B28">Halbig and Latoschik, 2021</xref>). They are best interpreted as secondary evidence that complements behavioral and self-report measures. In our locomotion comparison, the stability of these EEG measures across conditions reinforces the claim that emotional engagement depends on the integrity of the functional loop rather than on the specific form of locomotion.</p>
<p>Spatial metrics such as heatmaps, dwell time, and route entropy capture how participants traverse the environment. These indicators reveal surface differences in navigation while allowing assessment of whether such differences influence exposure or decision outcomes. For example, differences in path length or central-aisle bias may appear in teleportation versus walking conditions, yet if purchase patterns, interaction frequency, and affective states remain constant, this indicates that the underlying behavioral policy, and by extension the embodied function, has been preserved. Additional measures such as object interaction counts, inspection times, or virtual gaze-based engagement including pupil dilation can provide finer-grained confirmation of functional loop integrity.</p>
</sec>
<sec id="S9" sec-type="conclusion">
<label>9</label>
<title>Conclusion</title>
<p>Embodiment in virtual environments should be defined by function, not by form. When systems support perception, recognition, inspection, and feedback, they enable behavioral and emotional engagement even when interface mechanics differ from those of the physical world. The Thin Model provides a principled way to identify which functions must be preserved and where substitution may lead to breakdown. This allows us to treat virtual experience not as a lesser analogue but as a cognitively valid domain. If policies remain stable, the experience is real enough to matter, psychologically, practically, and ethically. In designing immersive systems, we should follow the structure of the mind, not the body.</p>
<p>The functional sufficiency criterion extends beyond immersive VR. First-person videogames, desktop simulations, and other mediated environments may support embodiment to the degree that they preserve the perception&#x2013;action loop. The relevant question is not the level of immersion but whether sensing, recognition, inspection, and feedback remain intact. This generalization invites comparative research across interface types.</p>
<p>Mental simulation is a central function that minimal embodiment must support. It allows users to anticipate outcomes, evaluate interactions, and enact real-world-like policies by internally reenacting sensory and motor experiences. Minimal embodiment should facilitate this process. VR, particularly through first-person perspectives and enriched environmental representations, is well suited to support mental simulation, especially when sensory inputs are coherently integrated, strengthening the perception&#x2013;action loop.</p>
<p>Finally, if immersive systems reliably influence attention, emotion, and decision-making, they demand ethical consideration regardless of their ontological status. The fact that virtual environments are mediated does not exempt them from obligations around informed consent, data minimization, and debriefing. A functionalist account of embodiment reinforces this view: if the system supports meaningful cognition, it must be treated as a site of real psychological engagement.</p>
</sec>
</body>
<back>
<sec id="S10" sec-type="data-availability">
<title>Data availability statement</title>
<p>The original contributions presented in the study are included in the article/supplementary material, further inquiries can be directed to the corresponding author.</p>
</sec>
<sec id="S11" sec-type="author-contributions">
<title>Author contributions</title>
<p>MW: Conceptualization, Supervision, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing. OP: Conceptualization, Validation, Writing &#x2013; review &#x0026; editing. AS: Validation, Writing &#x2013; review &#x0026; editing.</p>
</sec>
<sec id="S13" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="S14" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was used in the creation of this manuscript. We made use of generative AI (ChatGPT 5 Pro) to help refine the structure of the argument and edit the text of the article.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec id="S15" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Alexander</surname> <given-names>W. H.</given-names></name> <name><surname>Brown</surname> <given-names>J. W.</given-names></name></person-group> (<year>2019</year>). <article-title>The role of the anterior cingulate cortex in prediction error and signaling surprise.</article-title> <source><italic>Top. Cogn. Sci.</italic></source> <volume>11</volume> <fpage>119</fpage>&#x2013;<lpage>135</lpage>. <pub-id pub-id-type="doi">10.1111/tops.12307</pub-id> <pub-id pub-id-type="pmid">29131512</pub-id></mixed-citation></ref>
<ref id="B2"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Amini Gougeh</surname> <given-names>R.</given-names></name> <name><surname>Falk</surname> <given-names>T. H.</given-names></name></person-group> (<year>2023</year>). <article-title>Enhancing motor imagery detection efficacy using multisensory virtual reality priming.</article-title> <source><italic>Front. Neuroergon.</italic></source> <volume>4</volume>:<fpage>1080200</fpage>. <pub-id pub-id-type="doi">10.3389/fnrgo.2023.1080200</pub-id> <pub-id pub-id-type="pmid">38236517</pub-id></mixed-citation></ref>
<ref id="B3"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ban</surname> <given-names>Y.</given-names></name> <name><surname>Inazawa</surname> <given-names>M.</given-names></name> <name><surname>Kato</surname> <given-names>C.</given-names></name> <name><surname>Warisawa</surname> <given-names>S. I.</given-names></name></person-group> (<year>2024</year>). <article-title>VR communication simulation with scripted dialog elicits HPA axis stress.</article-title> <source><italic>Front. Virtual Reality</italic></source> <volume>4</volume>:<fpage>1302720</fpage>. <pub-id pub-id-type="doi">10.3389/frvir.2023.1302720</pub-id></mixed-citation></ref>
<ref id="B4"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Barsalou</surname> <given-names>L. W.</given-names></name></person-group> (<year>2008</year>). <article-title>Grounded cognition.</article-title> <source><italic>Annu. Rev. Psychol.</italic></source> <volume>59</volume> <fpage>617</fpage>&#x2013;<lpage>645</lpage>. <pub-id pub-id-type="doi">10.1146/annurev.psych.59.103006.093639</pub-id> <pub-id pub-id-type="pmid">17705682</pub-id></mixed-citation></ref>
<ref id="B5"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Barsalou</surname> <given-names>L. W.</given-names></name></person-group> (<year>2020</year>). <article-title>Challenges and opportunities for grounding cognition.</article-title> <source><italic>J. Cogn.</italic></source> <volume>3</volume>:<fpage>31</fpage>. <pub-id pub-id-type="doi">10.5334/joc.116</pub-id> <pub-id pub-id-type="pmid">33043241</pub-id></mixed-citation></ref>
<ref id="B6"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Basso</surname> <given-names>F.</given-names></name> <name><surname>Petit</surname> <given-names>O.</given-names></name> <name><surname>Le Bellu</surname> <given-names>S.</given-names></name> <name><surname>Lahlou</surname> <given-names>S.</given-names></name> <name><surname>Cancel</surname> <given-names>A.</given-names></name> <name><surname>Anton</surname> <given-names>J. L.</given-names></name></person-group> (<year>2018</year>). <article-title>Taste at first (person) sight: Visual perspective modulates brain activity implicitly associated with viewing unhealthy but not healthy foods.</article-title> <source><italic>Appetite</italic></source> <volume>128</volume> <fpage>242</fpage>&#x2013;<lpage>254</lpage>. <pub-id pub-id-type="doi">10.1016/j.appet.2018.06.009</pub-id> <pub-id pub-id-type="pmid">29906489</pub-id></mixed-citation></ref>
<ref id="B7"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Bigne</surname> <given-names>E.</given-names></name> <name><surname>Simonetti</surname> <given-names>A.</given-names></name> <name><surname>Guixeres</surname> <given-names>J.</given-names></name> <name><surname>Alcaniz</surname> <given-names>M.</given-names></name></person-group> (<year>2024</year>). <article-title>Visual attention and product interaction: A neuroscientific study on purchase across two product categories in a virtual store.</article-title> <source><italic>Int. J. Retail Distribution Manag.</italic></source> <volume>52</volume> <fpage>389</fpage>&#x2013;<lpage>406</lpage>. <pub-id pub-id-type="doi">10.1108/IJRDM-02-2023-0067</pub-id></mixed-citation></ref>
<ref id="B8"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Botvinick</surname> <given-names>M.</given-names></name> <name><surname>Cohen</surname> <given-names>J.</given-names></name></person-group> (<year>1998</year>). <article-title>Rubber hands &#x201C;feel&#x201D; touch that eyes see.</article-title> <source><italic>Nature</italic></source> <volume>391</volume>:<fpage>756</fpage>. <pub-id pub-id-type="doi">10.1038/35784</pub-id> <pub-id pub-id-type="pmid">9486643</pub-id></mixed-citation></ref>
<ref id="B9"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Bourdin</surname> <given-names>P.</given-names></name> <name><surname>Barberia</surname> <given-names>I.</given-names></name> <name><surname>Oliva</surname> <given-names>R.</given-names></name> <name><surname>Slater</surname> <given-names>M.</given-names></name></person-group> (<year>2017</year>). <article-title>A virtual out-of-body experience reduces fear of death.</article-title> <source><italic>PLoS One</italic></source> <volume>12</volume>:<fpage>e0169343</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0169343</pub-id> <pub-id pub-id-type="pmid">28068368</pub-id></mixed-citation></ref>
<ref id="B10"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Brooke</surname> <given-names>J.</given-names></name></person-group> (<year>1996</year>). <article-title>&#x201C;SUS: A &#x201C;quick and dirty&#x201D; usability scale,&#x201D;</article-title> in <source><italic>Usability Evaluation in Industry</italic></source>, <role>eds</role> <person-group person-group-type="editor"><name><surname>Jordan</surname> <given-names>P. W.</given-names></name> <name><surname>Thomas</surname> <given-names>B.</given-names></name> <name><surname>McClelland</surname> <given-names>I. L.</given-names></name> <name><surname>Weerdmeester</surname> <given-names>B. A.</given-names></name></person-group> (<publisher-loc>London</publisher-loc>: <publisher-name>Taylor &#x0026; Francis</publisher-name>), <fpage>189</fpage>&#x2013;<lpage>194</lpage>.</mixed-citation></ref>
<ref id="B11"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Buchholz</surname> <given-names>V. N.</given-names></name> <name><surname>David</surname> <given-names>N.</given-names></name> <name><surname>Sengelmann</surname> <given-names>M.</given-names></name> <name><surname>Engel</surname> <given-names>A. K.</given-names></name></person-group> (<year>2019</year>). <article-title>Belief of agency changes dynamics in sensorimotor networks.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>9</volume>:<fpage>1995</fpage>. <pub-id pub-id-type="doi">10.1038/s41598-018-37912-w</pub-id> <pub-id pub-id-type="pmid">30760743</pub-id></mixed-citation></ref>
<ref id="B12"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Chalmers</surname> <given-names>D. J.</given-names></name></person-group> (<year>2022</year>). <source><italic>Reality+: Virtual Worlds and the Problems of Philosophy.</italic></source> <publisher-loc>New York</publisher-loc>: <publisher-name>Penguin</publisher-name>.</mixed-citation></ref>
<ref id="B13"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>Y. T.</given-names></name> <name><surname>Tsai</surname> <given-names>M. S.</given-names></name></person-group> (<year>2025</year>). &#x201C;<article-title>The impact of wind experience on VR game immersion</article-title>,&#x201D; in <source><italic>Virtual, Augmented and Mixed Reality. HCII 2025. Lecture Notes in Computer Science, 15790</italic></source>, <role>eds</role> <person-group person-group-type="editor"><name><surname>Chen</surname> <given-names>J. Y. C.</given-names></name> <name><surname>Fragomeni</surname> <given-names>G.</given-names></name></person-group> (<publisher-loc>Cham</publisher-loc>: <publisher-name>Springer</publisher-name>).</mixed-citation></ref>
<ref id="B14"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Clark</surname> <given-names>A.</given-names></name></person-group> (<year>2013</year>). <article-title>Whatever next? Predictive brains, situated agents, and the future of cognitive science.</article-title> <source><italic>Behav. Brain Sci.</italic></source> <volume>36</volume> <fpage>181</fpage>&#x2013;<lpage>204</lpage>. <pub-id pub-id-type="doi">10.1017/S0140525X12000477</pub-id> <pub-id pub-id-type="pmid">23663408</pub-id></mixed-citation></ref>
<ref id="B15"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Cowan</surname> <given-names>K.</given-names></name> <name><surname>Ketron</surname> <given-names>S.</given-names></name> <name><surname>Kostyk</surname> <given-names>A.</given-names></name> <name><surname>Kristofferson</surname> <given-names>K.</given-names></name></person-group> (<year>2023</year>). <article-title>Can you smell the (virtual) roses? The influence of olfactory cues in virtual reality on immersion and positive brand responses.</article-title> <source><italic>J. Retailing</italic></source> <volume>99</volume> <fpage>385</fpage>&#x2013;<lpage>399</lpage>. <pub-id pub-id-type="doi">10.1016/j.jretai.2023.07.004</pub-id></mixed-citation></ref>
<ref id="B16"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Cowan</surname> <given-names>K.</given-names></name> <name><surname>Spielmann</surname> <given-names>N.</given-names></name> <name><surname>Horn</surname> <given-names>E.</given-names></name> <name><surname>Griffart</surname> <given-names>C.</given-names></name></person-group> (<year>2021</year>). <article-title>Perception is reality. How digital retail environments influence brand perceptions through presence.</article-title> <source><italic>J. Bus. Res.</italic></source> <volume>123</volume> <fpage>86</fpage>&#x2013;<lpage>96</lpage>. <pub-id pub-id-type="doi">10.1016/j.jbusres.2020.09.058</pub-id></mixed-citation></ref>
<ref id="B17"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>de Vries</surname> <given-names>R.</given-names></name> <name><surname>Jager</surname> <given-names>G.</given-names></name> <name><surname>Tijssen</surname> <given-names>I.</given-names></name> <name><surname>Zandstra</surname> <given-names>E. H.</given-names></name></person-group> (<year>2018</year>). <article-title>Shopping for products in a virtual world: Why haptics and visuals are equally important in shaping consumer perceptions and attitudes.</article-title> <source><italic>Food Quality Preference</italic></source> <volume>66</volume> <fpage>64</fpage>&#x2013;<lpage>75</lpage>. <pub-id pub-id-type="doi">10.1016/j.foodqual.2018.01.005</pub-id></mixed-citation></ref>
<ref id="B18"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Dewez</surname> <given-names>D.</given-names></name> <name><surname>Hoyet</surname> <given-names>L.</given-names></name> <name><surname>L&#x00E9;cuyer</surname> <given-names>A.</given-names></name> <name><surname>Argelaguet</surname> <given-names>F.</given-names></name></person-group> (<year>2020</year>). &#x201C;<article-title>Studying the inter-relation between locomotion techniques and embodiment in virtual reality</article-title>,&#x201D; in <source><italic>Proceedings of the 2020 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)</italic></source>, (<publisher-loc>Piscataway, NJ</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>452</fpage>&#x2013;<lpage>461</lpage>.</mixed-citation></ref>
<ref id="B19"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ehrsson</surname> <given-names>H. H.</given-names></name> <name><surname>Spence</surname> <given-names>C.</given-names></name> <name><surname>Passingham</surname> <given-names>R. E.</given-names></name></person-group> (<year>2004</year>). <article-title>That&#x2019;s my hand! Activity in premotor cortex reflects feeling of ownership of a limb.</article-title> <source><italic>Science</italic></source> <volume>305</volume> <fpage>875</fpage>&#x2013;<lpage>877</lpage>. <pub-id pub-id-type="doi">10.1126/science.1097011</pub-id> <pub-id pub-id-type="pmid">15232072</pub-id></mixed-citation></ref>
<ref id="B20"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Eichinger</surname> <given-names>I.</given-names></name> <name><surname>Schreier</surname> <given-names>M.</given-names></name> <name><surname>van Osselaer</surname> <given-names>S. M.</given-names></name></person-group> (<year>2022</year>). <article-title>Connecting to place, people, and past: How products make us feel grounded.</article-title> <source><italic>J. Market.</italic></source> <volume>86</volume> <fpage>1</fpage>&#x2013;<lpage>16</lpage>. <pub-id pub-id-type="doi">10.1177/00222429211027469</pub-id></mixed-citation></ref>
<ref id="B21"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Flavi&#x00E1;n</surname> <given-names>C.</given-names></name> <name><surname>Ib&#x00E1;&#x00F1;ez-S&#x00E1;nchez</surname> <given-names>S.</given-names></name> <name><surname>Or&#x00FA;s</surname> <given-names>C.</given-names></name></person-group> (<year>2021</year>). <article-title>The influence of scent on virtual reality experiences: The role of aroma-content congruence.</article-title> <source><italic>J. Bus. Res.</italic></source> <volume>123</volume> <fpage>289</fpage>&#x2013;<lpage>301</lpage>. <pub-id pub-id-type="doi">10.1016/j.jbusres.2020.09.036</pub-id></mixed-citation></ref>
<ref id="B22"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Freiwald</surname> <given-names>J. P.</given-names></name> <name><surname>Schmidt</surname> <given-names>S.</given-names></name> <name><surname>Riecke</surname> <given-names>B. E.</given-names></name> <name><surname>Steinicke</surname> <given-names>F.</given-names></name></person-group> (<year>2022</year>). <article-title>The continuity of locomotion: Rethinking conventions for locomotion and its visualization in shared virtual reality spaces.</article-title> <source><italic>ACM Trans. Graph.</italic></source> <volume>41</volume> <fpage>1</fpage>&#x2013;<lpage>14</lpage>. <pub-id pub-id-type="doi">10.1145/3550454.3555522</pub-id></mixed-citation></ref>
<ref id="B23"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Friston</surname> <given-names>K.</given-names></name> <name><surname>FitzGerald</surname> <given-names>T.</given-names></name> <name><surname>Rigoli</surname> <given-names>F.</given-names></name> <name><surname>Schwartenbeck</surname> <given-names>P.</given-names></name> <name><surname>Doherty</surname> <given-names>J.</given-names></name> <name><surname>Pezzulo</surname> <given-names>G.</given-names></name></person-group> (<year>2017</year>). <article-title>Active inference: A process theory.</article-title> <source><italic>Neural Comput.</italic></source> <volume>29</volume> <fpage>1</fpage>&#x2013;<lpage>49</lpage>. <pub-id pub-id-type="doi">10.1162/NECO_a_00912</pub-id> <pub-id pub-id-type="pmid">27870614</pub-id></mixed-citation></ref>
<ref id="B24"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Gabay</surname> <given-names>M.</given-names></name> <name><surname>Schonberg</surname> <given-names>T.</given-names></name></person-group> (<year>2025</year>). <article-title>The effect of virtual reality modality level of immersion and locomotion on spatial learning and gaze measures.</article-title> <source><italic>bioRxiv</italic></source> <comment>[Preprint]</comment> <pub-id pub-id-type="doi">10.1101/2025.03.09.642230</pub-id></mixed-citation></ref>
<ref id="B25"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Gentsch</surname> <given-names>A.</given-names></name> <name><surname>Sch&#x00FC;tz-Bosbach</surname> <given-names>S.</given-names></name></person-group> (<year>2011</year>). <article-title>I did it: Unconscious expectation of sensory consequences modulates the experience of self-agency and its functional signature.</article-title> <source><italic>J. Cogn. Neurosci.</italic></source> <volume>23</volume> <fpage>3817</fpage>&#x2013;<lpage>3828</lpage>. <pub-id pub-id-type="doi">10.1162/jocn_a_00012</pub-id> <pub-id pub-id-type="pmid">21452945</pub-id></mixed-citation></ref>
<ref id="B26"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Gross</surname> <given-names>D. C.</given-names></name> <name><surname>Stanney</surname> <given-names>K. M.</given-names></name> <name><surname>Cohn</surname> <given-names>L. J.</given-names></name></person-group> (<year>2005</year>). <article-title>Evoking affordances in virtual environments via sensory-stimuli substitution.</article-title> <source><italic>Presence Teleoperators Virtual Environ.</italic></source> <volume>14</volume> <fpage>482</fpage>&#x2013;<lpage>491</lpage>. <pub-id pub-id-type="doi">10.1162/105474605774785244</pub-id></mixed-citation></ref>
<ref id="B27"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Guy</surname> <given-names>M.</given-names></name> <name><surname>Normand</surname> <given-names>J. M.</given-names></name> <name><surname>Jeunet-Kelway</surname> <given-names>C.</given-names></name> <name><surname>Moreau</surname> <given-names>G.</given-names></name></person-group> (<year>2023</year>). <article-title>The sense of embodiment in virtual reality and its assessment methods.</article-title> <source><italic>Front. Virtual Reality</italic></source> <volume>4</volume>:<fpage>1141683</fpage>. <pub-id pub-id-type="doi">10.3389/frvir.2023.1141683</pub-id></mixed-citation></ref>
<ref id="B28"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Halbig</surname> <given-names>A.</given-names></name> <name><surname>Latoschik</surname> <given-names>M. E.</given-names></name></person-group> (<year>2021</year>). <article-title>A systematic review of physiological measurements, factors, methods and applications in virtual reality.</article-title> <source><italic>Front. Virtual Reality</italic></source> <volume>2</volume>:<fpage>694567</fpage>. <pub-id pub-id-type="doi">10.3389/frvir.2021.694567</pub-id></mixed-citation></ref>
<ref id="B29"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Jang</surname> <given-names>H.</given-names></name> <name><surname>Kim</surname> <given-names>J.</given-names></name> <name><surname>Lee</surname> <given-names>J.</given-names></name></person-group> (<year>2024</year>). <article-title>Effects of congruent multisensory feedback on the perception and performance of virtual reality hand-retargeted interaction.</article-title> <source><italic>IEEE Access</italic></source> <volume>12</volume> <fpage>119789</fpage>&#x2013;<lpage>119802</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2024.3450512</pub-id></mixed-citation></ref>
<ref id="B30"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kang</surname> <given-names>H. J.</given-names></name> <name><surname>Shin</surname> <given-names>J. H.</given-names></name> <name><surname>Ponto</surname> <given-names>K.</given-names></name></person-group> (<year>2020</year>). <article-title>How 3D virtual reality stores can shape consumer purchase decisions: The roles of informativeness and playfulness.</article-title> <source><italic>J. Interactive Mark.</italic></source> <volume>49</volume> <fpage>70</fpage>&#x2013;<lpage>85</lpage>. <pub-id pub-id-type="doi">10.1016/j.intmar.2019.07.002</pub-id></mixed-citation></ref>
<ref id="B31"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kilteni</surname> <given-names>K.</given-names></name> <name><surname>Groten</surname> <given-names>R.</given-names></name> <name><surname>Slater</surname> <given-names>M.</given-names></name></person-group> (<year>2012</year>). <article-title>The sense of embodiment in virtual reality.</article-title> <source><italic>Presence Teleoperators Virtual Environ.</italic></source> <volume>21</volume> <fpage>373</fpage>&#x2013;<lpage>387</lpage>. <pub-id pub-id-type="doi">10.1162/PRES_a_00124</pub-id></mixed-citation></ref>
<ref id="B32"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kim</surname> <given-names>J. H.</given-names></name> <name><surname>Kim</surname> <given-names>M.</given-names></name> <name><surname>Park</surname> <given-names>M.</given-names></name> <name><surname>Yoo</surname> <given-names>J.</given-names></name></person-group> (<year>2021</year>). <article-title>How interactivity and vividness influence consumer virtual reality shopping experience: The mediating role of telepresence.</article-title> <source><italic>J. Res. Interactive Mark.</italic></source> <volume>15</volume> <fpage>502</fpage>&#x2013;<lpage>525</lpage>. <pub-id pub-id-type="doi">10.1108/JRIM-07-2020-0148</pub-id></mixed-citation></ref>
<ref id="B33"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kober</surname> <given-names>S. E.</given-names></name> <name><surname>Kurzmann</surname> <given-names>J.</given-names></name> <name><surname>Neuper</surname> <given-names>C.</given-names></name></person-group> (<year>2012</year>). <article-title>Cortical correlate of spatial presence in 2D and 3D interactive virtual reality: An EEG study.</article-title> <source><italic>Int. J. Psychophysiol.</italic></source> <volume>83</volume> <fpage>365</fpage>&#x2013;<lpage>374</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijpsycho.2011.12.003</pub-id> <pub-id pub-id-type="pmid">22206906</pub-id></mixed-citation></ref>
<ref id="B34"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Lenggenhager</surname> <given-names>B.</given-names></name> <name><surname>Tadi</surname> <given-names>T.</given-names></name> <name><surname>Metzinger</surname> <given-names>T.</given-names></name> <name><surname>Blanke</surname> <given-names>O.</given-names></name></person-group> (<year>2007</year>). <article-title>Video ergo sum: Manipulating bodily self-consciousness.</article-title> <source><italic>Science</italic></source> <volume>317</volume> <fpage>1096</fpage>&#x2013;<lpage>1099</lpage>. <pub-id pub-id-type="doi">10.1126/science.1143439</pub-id> <pub-id pub-id-type="pmid">17717189</pub-id></mixed-citation></ref>
<ref id="B35"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Luangrath</surname> <given-names>A. W.</given-names></name> <name><surname>Peck</surname> <given-names>J.</given-names></name> <name><surname>Hedgcock</surname> <given-names>W.</given-names></name> <name><surname>Xu</surname> <given-names>Y.</given-names></name></person-group> (<year>2022</year>). <article-title>Observing product touch: The vicarious haptic effect in digital marketing and virtual reality.</article-title> <source><italic>J. Mark. Res.</italic></source> <volume>59</volume> <fpage>306</fpage>&#x2013;<lpage>326</lpage>. <pub-id pub-id-type="doi">10.1177/00222437211059540</pub-id></mixed-citation></ref>
<ref id="B36"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Mal</surname> <given-names>D.</given-names></name> <name><surname>D&#x00F6;llinger</surname> <given-names>N.</given-names></name> <name><surname>Wolf</surname> <given-names>E.</given-names></name> <name><surname>Wenninger</surname> <given-names>S.</given-names></name> <name><surname>Botsch</surname> <given-names>M.</given-names></name> <name><surname>Wienrich</surname> <given-names>C.</given-names></name><etal/></person-group> (<year>2024</year>). <article-title>Am I the odd one? Exploring (in)congruencies in the realism of avatars and virtual others in virtual reality.</article-title> <source><italic>Front. Virtual Reality</italic></source> <volume>5</volume>:<fpage>1417066</fpage>. <pub-id pub-id-type="doi">10.3389/frvir.2024.1417066</pub-id></mixed-citation></ref>
<ref id="B37"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Moullec</surname> <given-names>Y.</given-names></name> <name><surname>Cogn&#x00E9;</surname> <given-names>M.</given-names></name> <name><surname>Saint-Aubert</surname> <given-names>J.</given-names></name> <name><surname>L&#x00E9;cuyer</surname> <given-names>A.</given-names></name></person-group> (<year>2023</year>). <article-title>Assisted walking-in-place: Introducing assisted motion to walking-by-cycling in embodied virtual reality.</article-title> <source><italic>IEEE Trans. Vis. Comput. Graph.</italic></source> <volume>29</volume> <fpage>2796</fpage>&#x2013;<lpage>2805</lpage>. <pub-id pub-id-type="doi">10.1109/TVCG.2023.3247070</pub-id> <pub-id pub-id-type="pmid">37015135</pub-id></mixed-citation></ref>
<ref id="B38"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>No&#x00EB;</surname> <given-names>A.</given-names></name></person-group> (<year>2004</year>). <source><italic>Action in Perception.</italic></source> <publisher-loc>Cambridge, MA</publisher-loc>: <publisher-name>MIT Press</publisher-name>.</mixed-citation></ref>
<ref id="B39"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Oliva</surname> <given-names>A.</given-names></name> <name><surname>Torralba</surname> <given-names>A.</given-names></name></person-group> (<year>2007</year>). <article-title>The role of context in object recognition.</article-title> <source><italic>Trends Cogn. Sci.</italic></source> <volume>11</volume> <fpage>520</fpage>&#x2013;<lpage>527</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2007.09.009</pub-id> <pub-id pub-id-type="pmid">18024143</pub-id></mixed-citation></ref>
<ref id="B40"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Petit</surname> <given-names>O.</given-names></name> <name><surname>Velasco</surname> <given-names>C.</given-names></name></person-group> (<year>2026</year>). <source><italic>Consumer Consciousness: From Sensation to Digital Perception.</italic></source> <publisher-loc>Oxford</publisher-loc>: <publisher-name>Oxford University Press</publisher-name>.</mixed-citation></ref>
<ref id="B41"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Petit</surname> <given-names>O.</given-names></name> <name><surname>Javornik</surname> <given-names>A.</given-names></name> <name><surname>Velasco</surname> <given-names>C.</given-names></name></person-group> (<year>2022</year>). <article-title>We eat first with our (digital) eyes: Enhancing mental simulation of eating experiences via visual-enabling technologies.</article-title> <source><italic>J. Retailing</italic></source> <volume>98</volume> <fpage>277</fpage>&#x2013;<lpage>293</lpage>. <pub-id pub-id-type="doi">10.1016/j.jretai.2021.04.003</pub-id></mixed-citation></ref>
<ref id="B42"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Petit</surname> <given-names>O.</given-names></name> <name><surname>Velasco</surname> <given-names>C.</given-names></name> <name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2019</year>). <article-title>Digital sensory marketing: Integrating new technologies into multisensory online experience.</article-title> <source><italic>J. Interactive Mark.</italic></source> <volume>45</volume> <fpage>42</fpage>&#x2013;<lpage>61</lpage>. <pub-id pub-id-type="doi">10.1016/j.intmar.2018.07.004</pub-id></mixed-citation></ref>
<ref id="B43"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Roth</surname> <given-names>D.</given-names></name> <name><surname>Latoschik</surname> <given-names>M. E.</given-names></name></person-group> (<year>2020</year>). <article-title>Construction of the virtual embodiment questionnaire (VEQ).</article-title> <source><italic>IEEE Trans. Vis. Comput. Graph.</italic></source> <volume>26</volume> <fpage>3546</fpage>&#x2013;<lpage>3556</lpage>. <pub-id pub-id-type="doi">10.1109/TVCG.2020.3023603</pub-id> <pub-id pub-id-type="pmid">32941148</pub-id></mixed-citation></ref>
<ref id="B44"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Schnack</surname> <given-names>A.</given-names></name> <name><surname>Wright</surname> <given-names>M. J.</given-names></name> <name><surname>Elms</surname> <given-names>J.</given-names></name></person-group> (<year>2021b</year>). <article-title>Investigating the impact of shopper personality on behaviour in immersive virtual reality store environments.</article-title> <source><italic>J. Retail. Consum. Serv.</italic></source> <volume>61</volume>:<fpage>102581</fpage>. <pub-id pub-id-type="doi">10.1016/j.jretconser.2021.102581</pub-id></mixed-citation></ref>
<ref id="B45"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Schnack</surname> <given-names>A.</given-names></name> <name><surname>Wright</surname> <given-names>M. J.</given-names></name> <name><surname>Holdershaw</surname> <given-names>J. L.</given-names></name></person-group> (<year>2019</year>). <article-title>Immersive virtual reality technology in a three-dimensional virtual simulated store: Investigating telepresence and usability.</article-title> <source><italic>Food Res. Int.</italic></source> <volume>117</volume> <fpage>40</fpage>&#x2013;<lpage>49</lpage>. <pub-id pub-id-type="doi">10.1016/j.foodres.2018.01.028</pub-id> <pub-id pub-id-type="pmid">30736922</pub-id></mixed-citation></ref>
<ref id="B46"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Schnack</surname> <given-names>A.</given-names></name> <name><surname>Wright</surname> <given-names>M. J.</given-names></name> <name><surname>Holdershaw</surname> <given-names>J. L.</given-names></name></person-group> (<year>2020</year>). <article-title>An exploratory investigation of shopper behaviour in an immersive virtual reality store.</article-title> <source><italic>J. Consum. Behav.</italic></source> <volume>19</volume> <fpage>182</fpage>&#x2013;<lpage>195</lpage>. <pub-id pub-id-type="doi">10.1002/cb.1803</pub-id></mixed-citation></ref>
<ref id="B47"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Schnack</surname> <given-names>A.</given-names></name> <name><surname>Wright</surname> <given-names>M. J.</given-names></name> <name><surname>Holdershaw</surname> <given-names>J. L.</given-names></name></person-group> (<year>2021a</year>). <article-title>Does the locomotion technique matter in an immersive virtual store environment?&#x2013;Comparing motion-tracked walking and instant teleportation.</article-title> <source><italic>J. Retail. Consum. Serv.</italic></source> <volume>58</volume>:<fpage>102266</fpage>. <pub-id pub-id-type="doi">10.1016/j.jretconser.2020.102266</pub-id></mixed-citation></ref>
<ref id="B48"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Schubert</surname> <given-names>T.</given-names></name> <name><surname>Friedmann</surname> <given-names>F.</given-names></name> <name><surname>Regenbrecht</surname> <given-names>H.</given-names></name></person-group> (<year>2001</year>). <article-title>The experience of presence: Factor analytic insights.</article-title> <source><italic>Presence Teleoperators Virtual Environ.</italic></source> <volume>10</volume> <fpage>266</fpage>&#x2013;<lpage>281</lpage>. <pub-id pub-id-type="doi">10.1162/105474601300343603</pub-id></mixed-citation></ref>
<ref id="B49"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Seo</surname> <given-names>Y.</given-names></name> <name><surname>Kim</surname> <given-names>M.</given-names></name> <name><surname>Jung</surname> <given-names>Y.</given-names></name> <name><surname>Lee</surname> <given-names>D.</given-names></name></person-group> (<year>2017</year>). <article-title>Avatar face recognition and self-presence.</article-title> <source><italic>Comput. Hum. Behav.</italic></source> <volume>69</volume> <fpage>120</fpage>&#x2013;<lpage>127</lpage>. <pub-id pub-id-type="doi">10.1016/j.chb.2016.12.020</pub-id></mixed-citation></ref>
<ref id="B50"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Slater</surname> <given-names>M.</given-names></name></person-group> (<year>2009</year>). <article-title>Place illusion and plausibility can lead to realistic behaviour in immersive virtual environments.</article-title> <source><italic>Philos. Trans. R. Soc. Lond. B. Biol. Sci.</italic></source> <volume>364</volume> <fpage>3549</fpage>&#x2013;<lpage>3557</lpage>. <pub-id pub-id-type="doi">10.1098/rstb.2009.0138</pub-id> <pub-id pub-id-type="pmid">19884149</pub-id></mixed-citation></ref>
<ref id="B51"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Soler-Dom&#x00ED;nguez</surname> <given-names>J. L.</given-names></name> <name><surname>De Juan</surname> <given-names>C.</given-names></name> <name><surname>Contero</surname> <given-names>M.</given-names></name> <name><surname>Alca&#x00F1;iz</surname> <given-names>M.</given-names></name></person-group> (<year>2020</year>). <article-title>I walk, therefore I am: A multidimensional study on the influence of the locomotion method upon presence in virtual reality.</article-title> <source><italic>J. Comput. Design Eng.</italic></source> <volume>7</volume> <fpage>577</fpage>&#x2013;<lpage>590</lpage>. <pub-id pub-id-type="doi">10.1093/jcde/qwaa040</pub-id></mixed-citation></ref>
<ref id="B52"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Tal</surname> <given-names>H.</given-names></name> <name><surname>Wright</surname> <given-names>M.</given-names></name> <name><surname>Prest</surname> <given-names>S.</given-names></name> <name><surname>Sandved-Smith</surname> <given-names>L.</given-names></name> <name><surname>Sacchet</surname> <given-names>M.</given-names></name></person-group> (<year>2026</year>). <article-title>Active inference, computational phenomenology, and advanced meditation: Toward the formalization of the experience of meditation.</article-title> <source><italic>Neurosci. Biobehav. Rev.</italic></source> <volume>182</volume>:<fpage>106539</fpage>. <pub-id pub-id-type="doi">10.1016/j.neubiorev.2025.106539</pub-id> <pub-id pub-id-type="pmid">41475512</pub-id></mixed-citation></ref>
<ref id="B53"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Tian</surname> <given-names>F.</given-names></name> <name><surname>Wang</surname> <given-names>X.</given-names></name> <name><surname>Cheng</surname> <given-names>W.</given-names></name> <name><surname>Lee</surname> <given-names>M.</given-names></name> <name><surname>Jin</surname> <given-names>Y.</given-names></name></person-group> (<year>2022</year>). <article-title>A comparative study on the temporal effects of 2D and VR emotional arousal.</article-title> <source><italic>Sensors</italic></source> <volume>22</volume>:<fpage>8491</fpage>. <pub-id pub-id-type="doi">10.3390/s22218491</pub-id> <pub-id pub-id-type="pmid">36366201</pub-id></mixed-citation></ref>
<ref id="B54"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Ulrichs</surname> <given-names>J.</given-names></name> <name><surname>Matviienko</surname> <given-names>A.</given-names></name> <name><surname>Quintero</surname> <given-names>L.</given-names></name></person-group> (<year>2024</year>). &#x201C;<article-title>Effects of third-person locomotion techniques on sense of embodiment in virtual reality</article-title>,&#x201D; in <source><italic>Proceedings of the International Conference on Mobile and Ubiquitous Multimedia</italic></source>, (<publisher-loc>New York, NY</publisher-loc>: <publisher-name>ACM</publisher-name>), <fpage>72</fpage>&#x2013;<lpage>81</lpage>.</mixed-citation></ref>
<ref id="B55"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>Y. K.</given-names></name> <name><surname>Datta</surname> <given-names>P.</given-names></name></person-group> (<year>2010</year>). <article-title>The mediating role of virtual experience in online purchase intentions.</article-title> <source><italic>Inf. Resour. Manag. J.</italic></source> <volume>23</volume> <fpage>1</fpage>&#x2013;<lpage>18</lpage>. <pub-id pub-id-type="doi">10.4018/irmj.2010100104</pub-id></mixed-citation></ref>
<ref id="B56"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Witmer</surname> <given-names>B. G.</given-names></name> <name><surname>Singer</surname> <given-names>M. J.</given-names></name></person-group> (<year>1998</year>). <article-title>Measuring presence in virtual environments: A presence questionnaire.</article-title> <source><italic>Presence</italic></source> <volume>7</volume> <fpage>225</fpage>&#x2013;<lpage>240</lpage>. <pub-id pub-id-type="doi">10.1162/105474698565686</pub-id></mixed-citation></ref>
<ref id="B57"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Wright</surname> <given-names>M. J.</given-names></name> <name><surname>Sanguinetti</surname> <given-names>J. L.</given-names></name> <name><surname>Young</surname> <given-names>S.</given-names></name> <name><surname>Sacchet</surname> <given-names>M. D.</given-names></name></person-group> (<year>2023</year>). <article-title>Uniting contemplative theory and scientific investigation: Toward a comprehensive model of the mind.</article-title> <source><italic>Mindfulness</italic></source> <volume>14</volume> <fpage>1088</fpage>&#x2013;<lpage>1101</lpage>. <pub-id pub-id-type="doi">10.1007/s12671-023-02101-y</pub-id></mixed-citation></ref>
<ref id="B58"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Xie</surname> <given-names>Y.</given-names></name> <name><surname>Desouza</surname> <given-names>K. C.</given-names></name></person-group> (<year>2025</year>). <article-title>Feeling grounded when watching live streaming shows of highly anthropomorphic interactive virtual influencers: An exploratory study on customer opinions.</article-title> <source><italic>J. Bus. Res.</italic></source> <volume>199</volume>:<fpage>115507</fpage>. <pub-id pub-id-type="doi">10.1016/j.jbusres.2025.115507</pub-id></mixed-citation></ref>
<ref id="B59"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Xionghui</surname> <given-names>L.</given-names></name> <name><surname>Xiaoyu</surname> <given-names>Z.</given-names></name> <name><surname>Jiaming</surname> <given-names>Q.</given-names></name> <name><surname>Shuting</surname> <given-names>W.</given-names></name> <name><surname>Ping</surname> <given-names>D.</given-names></name></person-group> (<year>2024</year>). <article-title>&#x201C;Fake it, you can touch it&#x201D;: A study of virtual touch effects based on VR technology.</article-title> <source><italic>Virtual Reality</italic></source> <volume>29</volume>:<fpage>15</fpage>. <pub-id pub-id-type="doi">10.1007/s10055-024-01092-y</pub-id></mixed-citation></ref>
</ref-list>
<fn-group>
<fn id="n1" fn-type="custom" custom-type="edited-by"><p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/819301/overview">Valentina Cesari</ext-link>, Italian Institute of Technology (IIT), Italy</p></fn>
<fn id="n2" fn-type="custom" custom-type="reviewed-by"><p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3015848/overview">Eleonora Malloggi</ext-link>, University of Trento, Italy</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3143632/overview">Enrico Cipriani</ext-link>, University of Pisa, Italy</p></fn>
</fn-group>
</back>
</article>