<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Psychol.</journal-id>
<journal-title>Frontiers in Psychology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Psychol.</abbrev-journal-title>
<issn pub-type="epub">1664-1078</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpsyg.2024.1396946</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Psychology</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Unconscious multisensory integration: behavioral and neural evidence from subliminal stimuli</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" equal-contrib="yes">
<name><surname>Frumento</surname> <given-names>Sergio</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="author-notes" rid="fn0001"><sup>&#x2020;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/548070/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
</contrib>
<contrib contrib-type="author" equal-contrib="yes">
<name><surname>Preatoni</surname> <given-names>Greta</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="author-notes" rid="fn0001"><sup>&#x2020;</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Chee</surname> <given-names>Lauren</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Gemignani</surname> <given-names>Angelo</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Ciotti</surname> <given-names>Federico</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2001057/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Menicucci</surname> <given-names>Danilo</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/1803020/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Raspopovic</surname> <given-names>Stanisa</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/894188/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Department of Surgical, Medical, Molecular and Critical Area Pathology, University of Pisa</institution>, <addr-line>Pisa</addr-line>, <country>Italy</country></aff>
<aff id="aff2"><sup>2</sup><institution>Laboratory for Neuroengineering, Department of Health Sciences and Technology, Institute of Robotics and Intelligent Systems, ETH Z&#x00FC;rich</institution>, <addr-line>Z&#x00FC;rich</addr-line>, <country>Switzerland</country></aff>
<aff id="aff3"><sup>3</sup><institution>Clinical Psychology Branch, Azienda Ospedaliero-Universitaria Pisana</institution>, <addr-line>Pisa</addr-line>, <country>Italy</country></aff>
<author-notes>
<fn fn-type="edited-by" id="fn0002">
<p>Edited by: Joel Frohlich, University of T&#x00FC;bingen, Germany</p>
</fn>
<fn fn-type="edited-by" id="fn0003">
<p>Reviewed by: Alice Rossi Sebastiano, University of Turin, Italy</p>
<p>Alessandra DallaVecchia, University of California, Los Angeles, United States</p>
</fn>
<corresp id="c001">&#x002A;Correspondence: Stanisa Raspopovic, <email>nesta.fale@gmail.com</email></corresp>
<fn fn-type="equal" id="fn0001">
<p><sup>&#x2020;</sup>These authors have contributed equally to this work</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>18</day>
<month>07</month>
<year>2024</year>
</pub-date>
<pub-date pub-type="collection">
<year>2024</year>
</pub-date>
<volume>15</volume>
<elocation-id>1396946</elocation-id>
<history>
<date date-type="received">
<day>06</day>
<month>03</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>04</day>
<month>07</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2024 Frumento, Preatoni, Chee, Gemignani, Ciotti, Menicucci and Raspopovic.</copyright-statement>
<copyright-year>2024</copyright-year>
<copyright-holder>Frumento, Preatoni, Chee, Gemignani, Ciotti, Menicucci and Raspopovic</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<sec>
<title>Introduction</title>
<p>The prevailing theories of consciousness consider the integration of different sensory stimuli as a key component for this phenomenon to arise at the brain level. Although many theories and models have been proposed for multisensory integration between supraliminal stimuli (e.g., the optimal integration model), we do not know if multisensory integration also occurs for subliminal stimuli and what psychophysical mechanisms it follows.</p>
</sec>
<sec>
<title>Methods</title>
<p>To investigate this, subjects were exposed to visual (Virtual Reality) and/or haptic stimuli (Electro-Cutaneous Stimulation) above or below their perceptual threshold. They had to discriminate, in a two-Alternative Forced Choice Task, the intensity of unimodal and/or bimodal stimuli. They were then asked to discriminate the sensory modality while recording their EEG responses.</p>
</sec>
<sec>
<title>Results</title>
<p>We found evidence of multisensory integration for the supraliminal condition, following the classical optimal model. Importantly, even for subliminal trials, participants&#x2019; performances in the bimodal condition were significantly more accurate when discriminating the intensity of the stimulation. Moreover, significant differences emerged between unimodal and bimodal activity templates in parieto-temporal areas known for their integrative role.</p>
</sec>
<sec>
<title>Discussion</title>
<p>These converging lines of evidence&#x2014;even if preliminary and needing confirmation from the collection of further data&#x2014;suggest that subliminal multimodal stimuli can be integrated, thus filling a meaningful gap in the debate about the relationship between consciousness and multisensory integration.</p>
</sec>
</abstract>
<kwd-group>
<kwd>subliminal</kwd>
<kwd>multimodal integration</kwd>
<kwd>multisensory integration</kwd>
<kwd>unconscious integration</kwd>
<kwd>subthreshold</kwd>
<kwd>crossmodal integration</kwd>
<kwd>under threshold</kwd>
</kwd-group>
<counts>
<fig-count count="3"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="60"/>
<page-count count="11"/>
<word-count count="8883"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Consciousness Research</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="sec1">
<title>Introduction</title>
<p>Understanding how information is integrated and forms a conscious percept has been a challenge for scientists for decades: integration has been hypothesized to be a prerequisite of any conscious experience, a function of consciousness, or consciousness itself (<xref ref-type="bibr" rid="ref34">Mudrik et al., 2014</xref>).</p>
<p>Indeed, many theories [labeled as <italic>integration theories</italic> of consciousness (<xref ref-type="bibr" rid="ref49">Scott et al., 2018</xref>)] proposed that consciousness goes hand in hand with integration (<xref ref-type="bibr" rid="ref34">Mudrik et al., 2014</xref>; <xref ref-type="bibr" rid="ref60">Zher-Wen and Yu, 2023</xref>). For example, the global neuronal workspace theory (GNW) postulates that the conscious experience of a piece of information results from the integration of sensory stimuli processed by sensory as well as high-level areas (<xref ref-type="bibr" rid="ref14">Dehaene et al., 2003</xref>)&#x2014;though a recent version of GNW admitted the possibility of preconscious [i.e., &#x201C;a transient [&#x2026;] state of activity in which information is potentially accessible, yet not accessed&#x201D; (<xref ref-type="bibr" rid="ref13">Dehaene et al., 2006</xref>)] multimodal integration to explain the early (&#x003C;200&#x2009;ms) activations at which &#x201C;global brain activity splits between conscious and unconscious processing&#x201D; (<xref ref-type="bibr" rid="ref50">Sergent et al., 2021</xref>). The conscious access hypothesis (CAH) postulates that brain areas are independent and that &#x201C;consciousness is needed to integrate multiple sensory inputs&#x201D; (<xref ref-type="bibr" rid="ref2">Baars, 2002</xref>). The integrated information theory (IIT) postulates that &#x201C;consciousness requires both integration and differentiation&#x201D; and even some &#x201C;high-level cognitive performance such as judging whether a scene is congruous or incongruous [&#x2026;] lack integration and therefore are strictly unconscious&#x201D; (<xref ref-type="bibr" rid="ref55">Tononi et al., 2016</xref>). 
More broadly, the answer proposed for the consciousness/integration debate [e.g., that &#x201C;globally integrated perceptual scenes [&#x2026;] can only be conscious&#x201D; (<xref ref-type="bibr" rid="ref51">Seth and Bayne, 2022</xref>)] has been labeled as one of the main factors differentiating each theory of consciousness from the others (<xref ref-type="bibr" rid="ref51">Seth and Bayne, 2022</xref>).</p>
<p>The relationship between integration and consciousness has been investigated in studies showing that a subliminal stimulus facilitated the emergence of awareness of congruent supraliminal stimuli (<xref ref-type="bibr" rid="ref15">Deroy et al., 2014</xref>). While noteworthy, this evidence does not clarify whether integration is necessary for stimulus awareness or whether it just makes it more likely. In fact, the integration theories of consciousness admit the possibility for a supraliminal stimulus to be integrated with a subliminal one. On the other hand, &#x201C;only an experiment where both stimuli are unconsciously presented can truly probe unconscious multisensory integration&#x201D; (<xref ref-type="bibr" rid="ref34">Mudrik et al., 2014</xref>).</p>
<p>In this regard, only three recent studies administered subliminal stimuli coming from different sensory modalities during wakefulness (<xref ref-type="bibr" rid="ref60">Zher-Wen and Yu, 2023</xref>). <xref ref-type="bibr" rid="ref19">Faivre et al. (2014)</xref> found traces of multimodal integration for subliminal stimuli, but only after their previous supraliminal association (which suggests a determinant role of learning processes). <xref ref-type="bibr" rid="ref49">Scott et al. (2018)</xref> measured if visual and auditory stimuli (i.e., words) could be integrated resulting in a priming effect (i.e., associative learning of stimulus pairs). <xref ref-type="bibr" rid="ref9">Ching et al. (2019)</xref> checked whether an indicator of multimodal integration&#x2014;the McGurk effect, i.e., an interference between unmatched auditory and visual clues of syllables&#x2019; pronunciation (<xref ref-type="bibr" rid="ref32">Mcgurk and Macdonald, 1976</xref>)&#x2014;could be observed even in response to subliminal clues. All these studies reported a significant effect of the multimodal subliminal stimulation; however, whether this effect could be interpreted in terms of multisensory subliminal integration is debated (<xref ref-type="bibr" rid="ref9">Ching et al., 2019</xref>). Indeed, <xref ref-type="bibr" rid="ref9">Ching et al. (2019)</xref> cast the doubt that the studies actually measured a mere interaction&#x2014;rather than integration&#x2014;between stimuli: they propose that the unimodal information may persist and influence later processes, without combining that information in a Gestalt (<xref ref-type="bibr" rid="ref9">Ching et al., 2019</xref>).</p>
<p>Despite the noteworthy efforts expended in these studies, the so-called integration theories of consciousness can be (dis)confirmed only by converging findings about a specific postulate (<xref ref-type="bibr" rid="ref51">Seth and Bayne, 2022</xref>). This is why, beyond understanding whether subliminal integration is possible, a comprehensive study should search for neuroimaging evidence of this integration and should check whether it follows a psychophysical model comparable to that of conscious integration. One of the most famous models regarding multisensory integration was proposed by <xref ref-type="bibr" rid="ref17">Ernst and Banks (2002)</xref>, who suggested that humans integrate information similarly to a maximum-likelihood estimation (MLE). In their seminal work, they found that adults integrate multisensory stimuli by performing a weighted estimation of the available sensory cues. This model has been validated for supraliminal stimuli, and today, it is still unknown whether it also applies to stimuli that are presented outside of conscious awareness (i.e., subliminal stimuli) or not [as reported for children (<xref ref-type="bibr" rid="ref23">Gori et al., 2008</xref>; <xref ref-type="bibr" rid="ref35">Negen et al., 2019</xref>)].</p>
<p>To check whether multimodal subliminal stimuli can undergo integration&#x2014;and, if so, whether this is optimal or not&#x2014;we applied the same psychophysical model from the seminal study of <xref ref-type="bibr" rid="ref17">Ernst and Banks (2002)</xref> to one of the least studied combinations of subliminal sensory stimuli: visuo-haptic stimulation (<xref ref-type="bibr" rid="ref18">Faivre et al., 2017</xref>). In a separate session involving EEG, we then investigated brain responses to each class of stimuli in terms of event-related potentials (ERPs).</p>
</sec>
<sec sec-type="materials|methods" id="sec2">
<title>Materials and methods</title>
<p>In total, 12 healthy volunteers (7 males, 5 females) participated in the EEG session and 8 of them (5 males, 3 females) performed also the intensity discrimination experiment. All participants signed the informed consent. They were selected for having normal or corrected-to-normal vision and no history of sensory impairments. The experimental procedures were approved by the Institutional Ethics Committees of ETH Zurich (EK 2019-N-97) and carried out in accordance with the Declaration of Helsinki.</p>
<p>Healthy subjects were immersed in a virtual scenario (<xref ref-type="fig" rid="fig1">Figure 1A</xref>) by means of a VR headset. Visual stimuli consisted of gray circles appearing on the dorsum of the right foot; two TENS electrodes, applied on the same location of the right foot, delivered tactile stimuli consisting of electric pulses lasting 1&#x2009;ms (<xref ref-type="fig" rid="fig1">Figure 1B</xref>). Volunteers participated in 2 experiments: 12 participants underwent an ERP (event-related potentials) session consisting of a detection task asking to signal the conscious perception of visual, tactile, or visuo-tactile stimuli (<xref ref-type="fig" rid="fig1">Figure 1D</xref>); 8 of them also underwent a JND (just-noticeable differences) session consisting of a two-alternative forced-choice (2-AFC) task asking to discriminate which was the strongest between two stimuli (<xref ref-type="fig" rid="fig1">Figure 1C</xref>). The order of sessions was randomized, and the sessions were carried out on different days separated by 1&#x2009;week.</p>
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption>
<p>Experimental setting. <bold>(A)</bold> Set-up: participants wore a VR headset through which they were immersed in a scenario showing their virtual avatar&#x2019;s lower body and two VR controllers corresponding to those held; two TENS electrodes were placed on participant&#x2019;s right foot dorsum. For the ERP session, participants wore an EEG net. <bold>(B)</bold> Experimental conditions: visual (blue), tactile (green), and visuo-tactile (red). <bold>(C)</bold> JND session: participants had to judge which was the strongest among 2 consecutive stimuli by pressing the respective controller (a &#x201C;1&#x00B0;&#x201D; or &#x201C;2&#x00B0;&#x201D; icon appeared on the left and right controller). <bold>(D)</bold> ERP session: participants were asked to identify visual, tactile, or visuo-tactile nature of the trials by pressing the controller with the eye icon, the controller with the thunderbolt icon, or both, respectively. EEG, electroencephalography; VR, virtual reality; TENS, transcutaneous electrical nerve stimulation; solid contour lines&#x2009;=&#x2009;suprathreshold stimuli; dashed contour lines&#x2009;=&#x2009;subthreshold stimuli.</p>
</caption>
<graphic xlink:href="fpsyg-15-1396946-g001.tif"/>
</fig>
<p>During both experiments, a series of stimuli were administered, either unimodal (visual or tactile) or bimodal (visuo-tactile), either under or above the perceptual threshold. The visual stimulus consisted of a dark-red circle appearing on the right foot; the tactile stimulus consisted of an electrical current administered at the same point of the same foot; the visuo-tactile stimulus consisted of both the visual and the tactile stimuli administered simultaneously (<xref ref-type="fig" rid="fig1">Figure 1B</xref>). The conscious perception of the stimulus was manipulated by adjusting stimulus&#x2019; transparency (for visual stimuli) and by adjusting the pulsewidth and frequency of the electrical stimulation (for haptic stimuli). To find the perceptual threshold, a thorough characterization phase was performed immediately before each experimental session for each sensory mode and for each participant.</p>
<p>In this calibration phase (fully detailed in <xref rid="SM1" ref-type="supplementary-material">Supplementary Section 1.4</xref>), ramps of visual or tactile stimuli with increasing intensity were administered until the participant reported to have perceived a stimulus with above-chance confidence (i.e., detection threshold): the stimulus intensity was then averaged across at least 10 detection thresholds and increased or decreased by 15% to obtain subthreshold and suprathreshold stimuli. This criterion, derived from literature (<xref ref-type="bibr" rid="ref36">Nierhaus et al., 2015</xref>) and preventing habituation effects (<xref ref-type="bibr" rid="ref48">Rossi Sebastiano et al., 2022</xref>), was further validated in a subsequent check administering 15 subthreshold stimuli and 15 suprathreshold stimuli. This check was considered successful when at least 90% of the suprathreshold stimuli were felt, and at least 90% of the subthreshold stimuli were missed.</p>
<p>Importantly, the detection of each stimulus (regardless of it being administered above or below the calibrated threshold) was checked trial by trial, thus allowing the exclusion from the analyses of the incongruent stimuli (e.g., subliminal stimuli that were actually seen/felt). Indeed, participants were instructed to mark the stimulus as perceived by clicking the right or left VR controller according to the nature of percept (visual or tactile) or both for visuo-tactile stimuli. During both the calibration and the experimental phases, participants were further instructed to report the awareness of the stimuli when they perceived it with an above-chance confidence, i.e., not needing to be 100% sure: this approach guaranteed that stimuli marked as subliminal were, in fact, unconscious and not merely perceived with insufficient (though above-chance) confidence.</p>
<p>Evidence of multimodal integration was searched in terms of (1) significantly different accuracy in discriminating just-noticeable differences between consecutive trials and (2) significant differences between the EEG activity in response to unimodal or bimodal trials in a temporoparietal component that previous literature (<xref ref-type="bibr" rid="ref16">Driver and Noesselt, 2008</xref>; <xref ref-type="bibr" rid="ref24">Hidaka et al., 2015</xref>) indicated to account for visuo-tactile integration. Details about the statistical analyses implemented are fully reported in <xref rid="SM1" ref-type="supplementary-material">Supplementary Material</xref>: however, analysis files and EEG/JND data object of analysis are publicly shared at the Open Science Framework repository that can be reached at the link <ext-link xlink:href="https://osf.io/5wsnk/" ext-link-type="uri">https://osf.io/5wsnk/</ext-link>.</p>
<sec id="sec3">
<title>Just-noticeable differences (JNDs) session</title>
<p>Participants were asked to determine which was the strongest between the two trials, providing their answer through a VR controller (see <xref rid="SM1" ref-type="supplementary-material">Supplementary Material</xref>) in two conditions: comparing supraliminal trials and comparing subliminal trials. None of the stimuli (110 pairs of trials per participant) in the JND session had to be excluded from the analysis, as the careful threshold calibration (see <xref rid="SM1" ref-type="supplementary-material">Supplementary Material</xref>) allowed all suprathreshold stimuli and none of the subthreshold ones to be perceived. We measured whether the distribution of answers followed a model of maximum-likelihood estimation (<xref ref-type="bibr" rid="ref17">Ernst and Banks, 2002</xref>) (see <xref rid="SM1" ref-type="supplementary-material">Supplementary Material</xref>) and whether the accuracy in discriminating bimodal or unimodal trials was significantly different and higher than chance.</p>
</sec>
<sec id="sec4">
<title>Event-related potentials (ERPs) session</title>
<p>The ERP session consisted of the administration of 1,050 randomized trials that participants had to correctly detect and discriminate as being either tactile, visual, or visuo-tactile. Suprathreshold conditions consisted of 100 visual (V<sub>SUPRA</sub>), 100 tactile (T<sub>SUPRA</sub>), and 100 visuo-tactile (VT<sub>SUPRA</sub>) trials; subliminal conditions consisted of 250 visual (V<sub>SUB</sub>), 250 tactile (T<sub>SUB</sub>), and 250 visuo-tactile (VT<sub>SUB</sub>) trials. Stimuli were administered in a randomized order, with an intertrial interval (ITI) jittered between 1 and 2&#x2009;s.</p>
<p>The EEG signals were acquired at 256&#x2009;Hz with a 64-electrode cap, maintaining impedance below 5&#x2009;k&#x03A9; (BE Plus LTM, EBNeuro, Florence, IT). Recorded EEG signals were submitted to the following preprocessing steps: (1) EEG signals were filtered in the 0.5&#x2013;45-Hz band (EEGLAB basic FIR filter); (2) EEG signals were visually scrolled for manual artifact identification, and any segment containing idiosyncratic artifacts (mostly due to small movements and temporary declines of signal quality) was highlighted and thus removed (EEGLAB); (3) noisy channels were identified, and their signal was substituted with signal obtained via spline interpolation (<xref ref-type="bibr" rid="ref27">Junghofer et al., 2000</xref>); (4) EEG signals were submitted to the independent component analysis [Infomax (<xref ref-type="bibr" rid="ref4">Bell and Sejnowski, 1995</xref>)] in order to remove ocular, cardiac, and muscular artifacts (<xref ref-type="bibr" rid="ref30">Makeig et al., 1996</xref>); artifactual components were selected and removed based on a visual inspection of the component time course and its power spectrum, as well as on the analytic tools developed in the ICLabel toolbox (<xref ref-type="bibr" rid="ref40">Pion-Tonachini et al., 2019</xref>) to support visual judgment examination; (5) the obtained EEG signals were finally re-referenced from the vertex to the common reference (<xref ref-type="bibr" rid="ref39">Piarulli et al., 2010</xref>).</p>
<p>The scientific literature about EEG correlates of multisensory integration involves approaches [e.g., Global Field Power (<xref ref-type="bibr" rid="ref37">Noel et al., 2019</xref>), whole-scalp point-by-point analysis (<xref ref-type="bibr" rid="ref20">Fossataro et al., 2023</xref>), and ERP super-additivity (<xref ref-type="bibr" rid="ref47">Ronga et al., 2021</xref>)] that might not be perfectly suitable for the kind of data collected for the present study (i.e., correlates of very weak stimuli delivered slightly above or below the awareness threshold), as we better contextualized in the <xref rid="SM1" ref-type="supplementary-material">Supplementary Material</xref> (where we also provide ERP data obtained through more traditional approaches).</p>
<p>To check for traces of processing and integration of stimuli in the EEG recordings, we extracted via ICA (<xref ref-type="bibr" rid="ref4">Bell and Sejnowski, 1995</xref>) four independent components temporally related to trials (<xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S3</xref>) and the related activity templates (i.e., its time course averaged over trials).</p>
<p>The focus was directed to any component showing, at least for supraliminal stimuli, significant differences between the bimodal trials and both their unimodal correspondents while these were not significantly differing from each other. The strictness of these criteria aimed at excluding possible differences that could be driven by the processing of single unimodal stimuli.</p>
<p>More in detail, for all subjects we selected trials for which the participant&#x2019;s answer was congruent with the delivered stimulus/i (incongruent trials were 47% for V<sub>SUPRA</sub>, 35% for T<sub>SUPRA</sub>, 64% for VT<sub>SUPRA</sub>, 22% for V<sub>SUB</sub>, 4% for T<sub>SUB</sub>, 23% for VT<sub>SUB</sub>). ERP was thus extracted from the EEG signal based on the time location of congruent trials: each segment started from 100&#x2009;ms before to 400&#x2009;ms after each trial onset. To distinguish the putative temporoparietal component accounting for visuo-tactile integration (<xref ref-type="bibr" rid="ref16">Driver and Noesselt, 2008</xref>; <xref ref-type="bibr" rid="ref24">Hidaka et al., 2015</xref>) from components accounting for the processing of unimodal stimuli (<xref ref-type="bibr" rid="ref29">Laurino et al., 2014</xref>), all ERP signals were concatenated and submitted to the independent component analysis (ICA).</p>
<p>The ICA-based ERP decomposition consisted of deriving a special combination of the different EEG channel signals that allow separating components originating from different brain sources (<xref ref-type="bibr" rid="ref26">Jung et al., 2001</xref>). Thus, the ICA-based ERP decomposition modeled ERPs as the sum of temporally independent components (that is, with statistically independent time course) arising from distinct, spatially fixed, brain processes. Herein, a group-based ICA decomposition (<xref ref-type="bibr" rid="ref25">Himberg et al., 2004</xref>; <xref ref-type="bibr" rid="ref33">Menicucci et al., 2014</xref>) was performed by applying the Infomax ICA algorithm (<xref ref-type="bibr" rid="ref4">Bell and Sejnowski, 1995</xref>) on the concatenated ERPs of all trial types and subjects. This approach implied the assumption that all subjects had comparable brain components and that the stimulus awareness (subliminal or supraliminal) modulated the time course of components but did not affect their scalp distribution (<xref ref-type="bibr" rid="ref26">Jung et al., 2001</xref>).</p>
<p>The number of underlying components was determined based on a preliminary principal component analysis by retaining components explaining 95% of the total ERP variance. On this basis, four components were retained and, among them, the temporoparietal component was selected. The scalp distribution (i.e., the contribution to the potentials recorded at each scalp channel) and the time course of independent components were provided by ICA as corresponding to the demixing matrix and to the activation time series, respectively. Finally, as ICA was performed at the group level with the ERPs of all trials and subjects concatenated together, from each component activation time series we derived the activity templates showing the average component activity for each stimulus type and subject.</p>
<p>For the selected component, we compared (subject-based paired <italic>t</italic>-test) bimodal and unimodal activity templates for both subliminal and supraliminal modalities. Searching for traces of multimodal integration, we checked for the latencies at which bimodal activity templates were significantly different from both their unimodal counterparts simultaneously, while the unimodal activity templates were not significantly differing from each other: in addition, to preserve both the sensitivity and the reliability of the statistical analysis, we considered as positive results only runs of consecutive significant latencies with a total duration of at least 12&#x2009;ms (i.e., three consecutive samples). For the sake of completeness, the same comparisons were performed for the other components extracted and are reported in <xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S4</xref>. Finally, to provide further information about the brain origin of the component, we used the EEGLAB Dipfit 4.3 plugin to estimate the equivalent current dipoles adjusted by means of the boundary element model (BEM) of the head (<xref ref-type="bibr" rid="ref5">Bocharov et al., 2020</xref>).</p>
<p>This approach stands on the assumption that local cortical connections are characterized by a much higher density than longer range ones; this premised, it can be assumed that synchronous coupling of neuronal activity isolated by ICA typically occurs within a single brain area. The resulting scalp maps can highly resemble the projection of a single equivalent dipole or a bilaterally symmetric pair of dipoles and may thus represent a projection of activity from one patch&#x2014;or two symmetric patches&#x2014;of the cortex.</p>
<p>The combination of all the above-mentioned criteria implies that significant differences possibly observed in one or more components could be more reliably interpreted as a marker of subliminal multisensory integration if (1) they appeared in the supraliminal conditions too, for which there is robust evidence that multisensory integration occurs, (2) the topography is compatible with multisensory hubs reported in the scientific literature, (3) their duration of at least 12&#x2009;ms reasonably rules out the occurrence of mere coincidences, and (4) the absence of simultaneous significant differences between the two unimodal conditions reasonably rules out that the bimodal activity template is over-representing the processing of one kind of stimulus only.</p>
</sec>
</sec>
<sec sec-type="results" id="sec5">
<title>Results</title>
<sec id="sec6">
<title>Just-noticeable differences (JNDs)</title>
<p>For the suprathreshold condition (<xref ref-type="fig" rid="fig2">Figure 2A</xref> and <xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S1A</xref>), the pseudo-R2 (see <xref rid="SM1" ref-type="supplementary-material">Supplementary Material</xref>) indicated a high Goodness of fit (<inline-formula>
<mml:math id="M1">
<mml:msubsup>
<mml:mi>R</mml:mi>
<mml:mi>L</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula> = 0.78), meaning that the model fitted the experimental results well. The visual JND (JND_V<sub>SUPRA</sub>&#x2009;=&#x2009;3.006) was not statistically different (<italic>p</italic>&#x2009;=&#x2009;0.62) from the tactile JND (JND_T<sub>SUPRA</sub>&#x2009;=&#x2009;2.95), indicating that the values were rescaled correctly to allow similar weights (see <xref rid="SM1" ref-type="supplementary-material">Supplementary Material</xref>) and avoid having a dominant sensory modality (<xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S2</xref>). As predicted by the MLE model, the bimodal condition (JND_VT<sub>SUPRA</sub>&#x2009;=&#x2009;1.86) was significantly smaller compared to both the tactile (<italic>p</italic>&#x2009;=&#x2009;0.006, power&#x2009;=&#x2009;0.97, <italic>effect size&#x2009;=&#x2009;3.81</italic>) and visual JND (<italic>p</italic>&#x2009;=&#x2009;0.0007, power&#x2009;=&#x2009;0.99, <italic>effect size&#x2009;=&#x2009;5.02</italic>) (<xref ref-type="fig" rid="fig2">Figure 2B</xref>). Moreover, the bimodal JND was the most similar to the predicted behavior from the MLE model (JND_MLE<sub>SUPRA</sub>&#x2009;=&#x2009;2.1).</p>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption>
<p>Results for the JND session for all conditions: touch (green), visual (blue), and visuo-tactile (red). <bold>(A&#x2013;C)</bold> Results for the suprathreshold condition: <bold>(A)</bold> psychometric curves of two exemplary subjects; <bold>(B)</bold> JNDs for all subjects. Bar plots represent mean and CI. The dashed horizontal line represents the predicted behavior following the MLE model; <bold>(C)</bold> accuracy. Results are presented as mean&#x2009;&#x00B1;&#x2009;standard error of the mean. <bold>(D&#x2013;F)</bold> Results for the subthreshold condition: <bold>(D)</bold> psychometric curves of two exemplary subjects; <bold>(E)</bold> JNDs for all subjects. Bar plots represent mean and standard error of the mean. The dashed horizontal line represents the predicted behavior following the MLE model; <bold>(F)</bold> accuracy: dashed line represents chance level. Results are presented as mean&#x2009;&#x00B1;&#x2009;standard error of the mean. T, tactile; V, visual; VT, visuo-tactile; MLE, maximum-likelihood estimation.</p>
</caption>
<graphic xlink:href="fpsyg-15-1396946-g002.tif"/>
</fig>
<p>For the subliminal condition (<xref ref-type="fig" rid="fig2">Figure 2D</xref> and <xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S1B</xref>), the pseudo-R2 indicated a very low Goodness of fit (<inline-formula>
<mml:math id="M2">
<mml:msubsup>
<mml:mi>R</mml:mi>
<mml:mi>L</mml:mi>
<mml:mn>2</mml:mn>
</mml:msubsup>
</mml:math>
</inline-formula> = 0.23), meaning that the model could not fit sufficiently well the experimental data. Moreover, none of the experimental conditions differed (<italic>p</italic>&#x2009;&#x003E;&#x2009;0.5) in their JND (JND_T<sub>SUB</sub>&#x2009;=&#x2009;19.98, JND_V<sub>SUB</sub>&#x2009;=&#x2009;34.2, JND_VT<sub>SUB</sub>&#x2009;=&#x2009;14.3, JND_MLE<sub>SUB</sub>&#x2009;=&#x2009;17.26) (<xref ref-type="fig" rid="fig2">Figure 2E</xref>).</p>
<p>We then explored how well the subjects performed in each condition, hence how accurate they were in indicating which was the most intense stimulus and if a bimodal stimulation would allow a better performance. In the suprathreshold condition (<xref ref-type="fig" rid="fig2">Figure 2C</xref>), the bimodal performance (ACC_VT<sub>SUPRA</sub>&#x2009;=&#x2009;79.3%) was significantly higher than the tactile one (ACC_T<sub>SUPRA</sub>&#x2009;=&#x2009;70.5%, <italic>p</italic>&#x2009;=&#x2009;0.03, power&#x2009;=&#x2009;0.97, <italic>effect size&#x2009;=&#x2009;1.44</italic>) and the visual one (ACC_V<sub>SUPRA</sub>&#x2009;=&#x2009;70%, <italic>p</italic>&#x2009;=&#x2009;0.03, power&#x2009;=&#x2009;0.98, <italic>effect size&#x2009;=&#x2009;1.52</italic>). In the subthreshold condition, the accuracies were not significantly different from each other (<italic>p</italic>&#x2009;=&#x2009;0.67). However, when comparing these to the chance level (50%), the bimodal condition had a significantly higher accuracy (ACC_VT<sub>SUB</sub>&#x2009;=&#x2009;55%, <italic>p</italic>&#x2009;=&#x2009;0.04, power&#x2009;=&#x2009;0.53, <italic>effect size&#x2009;=&#x2009;0.7</italic>) (<xref ref-type="fig" rid="fig2">Figure 2F</xref>).</p>
</sec>
<sec id="sec7">
<title>Event-related potentials (ERPs)</title>
<p>A first analysis of ERPs compared unimodal to bimodal stimuli finding no significant differences worth being interpreted as signs of multimodal integration: details about this analysis are reported in the <xref rid="SM1" ref-type="supplementary-material">Supplementary Sections 2.2, 2.3</xref>, and a representative selection of these results is visible in <xref rid="SM1" ref-type="supplementary-material">Supplementary Figures S5&#x2013;S8</xref>.</p>
<p>For what concerns the ICA-based ERP decomposition, three out of the four extracted components were excluded due to the absence of statistical significance across the different conditions, as illustrated in <xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S4</xref>. This finding precludes their interpretation as a reliable marker of multisensory integration. In fact, <xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S4</xref> shows that Component A (visual component exhibiting a late positive peak for all visual supraliminal stimulations), Component B (central areas that isolate the P300), and Component C (left temporoparietal area) lacked differences interpretable as multimodal integration (<xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S4</xref>). Importantly, these differences were missing also in supraliminal conditions, for which behavioral correlates indicated that bimodal stimuli were in fact integrated [coherently with the scientific literature (<xref ref-type="bibr" rid="ref17">Ernst and Banks, 2002</xref>)].</p>
<p>On the other hand, a temporoparietal component (<xref ref-type="fig" rid="fig3">Figure 3A</xref>) showed significant differences, coherently to the hypothesis that in these regions visuo-tactile integration would occur (<xref ref-type="bibr" rid="ref16">Driver and Noesselt, 2008</xref>; <xref ref-type="bibr" rid="ref24">Hidaka et al., 2015</xref>). <xref ref-type="fig" rid="fig3">Figure 3B</xref> shows the scalp localization and the estimated current dipoles of this component. The dipole locations were compatible with sources placed in Brodmann areas 37 and 19, visual areas known for their associative functions (e.g., their lesion impairs the ability to compute a semantic representation of stimuli) (<xref ref-type="bibr" rid="ref43">Race and Hillis, 2015</xref>). For what concerns the supraliminal stimuli, differences in this component&#x2019;s activity templates were located around latencies of 300&#x2009;ms, with bimodal trials producing higher positive responses. For what concerns the subliminal stimuli, differences started from 200&#x2009;ms (a latency at which sensory stimuli evoke specific responses even if delivered during sleep) (<xref ref-type="bibr" rid="ref29">Laurino et al., 2014</xref>) and highlighted a steady positivity differentiating bimodal-related activity template from the corresponding unimodal ones.</p>
<fig position="float" id="fig3">
<label>Figure 3</label>
<caption>
<p>Topography and activity templates of the integrative temporoparietal independent component in response to supraliminal or subliminal, unimodal, or bimodal trials. <bold>(A)</bold> Component scalp topography and the related dipole locations <bold>(B)</bold>. <bold>(C)</bold> Activity templates for supraliminal trials and subliminal trials <bold>(D)</bold>. Thicker lines (solid or dashed for supraliminal or subliminal trials, respectively) represent the group mean; thinner ones represent the group mean&#x2009;&#x00B1;&#x2009;the standard error of the mean (SEM); the black dots represent latencies at which bimodal trials significantly (<italic>p</italic>&#x2009;&#x003C;&#x2009;0.05) differ from both unimodal trials simultaneously, while these latter were not significantly differing from each other; a.u., arbitrary units. The earlier component appearing 50&#x2009;ms after stimulus onset is driven by the tactile stimulus, coherently with previous results comparing ERPs of subliminal and supraliminal tactile stimuli (<xref ref-type="bibr" rid="ref36">Nierhaus et al., 2015</xref>); the time windows of the latter components (190&#x2013;260&#x2009;ms for subliminal conditions; 275&#x2013;300&#x2009;ms for supraliminal conditions) are temporally complementary and coherent with the timing (250&#x2013;300&#x2009;ms) of the processes that differentiate conscious from unconscious processing (<xref ref-type="bibr" rid="ref50">Sergent et al., 2021</xref>).</p>
</caption>
<graphic xlink:href="fpsyg-15-1396946-g003.tif"/>
</fig>
<p>Considering visual, tactile, and visuo-tactile trials separately, the comparisons between supraliminal and subliminal-related activity templates (<xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S3</xref>) showed much of the differences at latencies after 200&#x2009;ms, with supraliminal responses exhibiting higher positivity compared to subliminal ones peaking at approximately 300&#x2009;ms. This higher positivity is coherent with the scientific literature describing P300 responses to consciously detected stimuli (<xref ref-type="bibr" rid="ref38">Pfabigan et al., 2014</xref>), though it is debated whether P300 should be considered a correlate of conscious perception or detection report (<xref ref-type="bibr" rid="ref56">Tsuchiya et al., 2015</xref>).</p>
<p>The methodological reasons (e.g., weak intensity of stimuli) that could have possibly resulted in a lack of replication between the two analyses (ERP and ICA-based ERP decomposition) are deepened in the <xref rid="SM1" ref-type="supplementary-material">Supplementary Sections 2.2, 2.3</xref>.</p>
</sec>
</sec>
<sec sec-type="discussion" id="sec8">
<title>Discussion</title>
<p>The results reported in the present study show both behavioral and neuroimaging significant differences between bimodal (visuo-tactile) and unimodal (visual or tactile) stimuli, even when these stimuli were subliminal.</p>
<p>The supraliminal conditions of the JND session replicated the results initially reported in the seminal paper by <xref ref-type="bibr" rid="ref17">Ernst and Banks (2002)</xref>: bimodal stimuli were integrated following the expected maximum-likelihood estimation (MLE) model and were discriminated with an accuracy significantly higher than their unimodal counterparts (<xref ref-type="fig" rid="fig2">Figure 2C</xref>).</p>
<p>On the other side, the subliminal conditions did not show a significantly higher accuracy for the discrimination of bimodal (with respect to unimodal) stimuli, nor an adherence to the MLE model; however, participants showed significantly higher-than-chance performance in discriminating just-noticeable differences (JNDs) between subliminal stimuli only if these were bimodal (<xref ref-type="fig" rid="fig2">Figure 2F</xref>).</p>
<p>Should these significant differences be interpreted as a behavioral correlate of multisensory integration, even if they do not perfectly match the results reported for supraliminal conditions? The answer to this question is debatable, as it can vary depending on what we mean by &#x201C;multisensory integration&#x201D;; however, authoritative definitions of multisensory integration [e.g., &#x201C;the process by which inputs from two or more senses are combined to form a product that is distinct from [&#x2026;] the components from which it is created,&#x201D; and &#x201C;a statistically significant difference between the response evoked by a cross-modal combination of stimuli and that evoked by the most effective of its components individually&#x201D; (<xref ref-type="bibr" rid="ref53">Stein et al., 2014</xref>)] fit with the evidence shown for subliminal conditions (i.e., a significantly higher-than-chance performance in discriminating bimodal stimuli, but not unimodal ones). Coherently with this definition, it is worth noting that multisensory integration does not always follow the model described by <xref ref-type="bibr" rid="ref17">Ernst and Banks (2002)</xref>: for example, the results they obtained in adults did not match those later obtained in children (<xref ref-type="bibr" rid="ref23">Gori et al., 2008</xref>; <xref ref-type="bibr" rid="ref35">Negen et al., 2019</xref>). This was not interpreted as an inability of children to integrate multisensory stimuli, but rather as the result of a task-dependent strategy&#x2014;that changes across development&#x2014;attributing a different weight to sensory modes (<xref ref-type="bibr" rid="ref23">Gori et al., 2008</xref>).</p>
<p>This all considered, the significantly higher-than-chance accuracy in discriminating subliminal bimodal stimuli&#x2014;but not unimodal ones&#x2014;is reasonably interpretable as a clue of multisensory integration, even if different from that occurring for supraliminal stimuli.</p>
<p>The ICA-based ERP decomposition resulted in four components (the topography of which is shown in <xref rid="SM1" ref-type="supplementary-material">Supplementary Figures S3A&#x2013;D</xref>) for which significant differences are reported when comparing each class of supraliminal stimuli with its subliminal counterpart (<xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S3</xref>), coherently with the scientific literature (<xref ref-type="bibr" rid="ref36">Nierhaus et al., 2015</xref>). Multimodal integration was expected to result in significant differences between bimodal (i.e., visuo-tactile) stimuli and both their unimodal (i.e., visual or tactile) counterparts while these were not significantly differing from each other: the meeting of all these conditions can reasonably guarantee that significant differences between bimodal and unimodal stimuli are not solely attributable to the influence of the more dominant unimodal stimulus. Coherently with the scientific literature (<xref ref-type="bibr" rid="ref38">Pfabigan et al., 2014</xref>) and with the results of the JND session, these significant differences were found in one of the four components (<xref ref-type="fig" rid="fig3">Figure 3A</xref>); while this was expected in comparing supraliminal stimuli (<xref ref-type="fig" rid="fig3">Figure 3C</xref>), their presence in comparing subliminal stimuli (<xref ref-type="fig" rid="fig3">Figure 3D</xref>) represents an unprecedented result. Interestingly, the latencies at which these significant differences occurred (i.e., 190&#x2013;260&#x2009;ms for subliminal conditions; 275&#x2013;300&#x2009;ms for supraliminal conditions) seem to be complementary rather than overlapping, suggesting that the underlying processing was qualitatively different. 
This could mean that the brain processes underlying the multisensory integration of subliminal stimuli are not just a weaker version of those related to the multisensory integration of supraliminal stimuli: the &#x201C;ignition&#x201D; &#x2014;i.e., &#x201C;a sudden non-linear transition toward a state of globally increased brain activity&#x201D; (<xref ref-type="bibr" rid="ref14">Dehaene et al., 2003</xref>) thought to result in conscious access to stimuli (<xref ref-type="bibr" rid="ref51">Seth and Bayne, 2022</xref>)&#x2014;could consist of recurrent processing (not recurring enough, in the case of subliminal stimuli) rather than of a broadcast of information across distant areas. Interestingly, the temporal window of ignition (250&#x2013;300&#x2009;ms post-stimulus) estimated in a recent study (<xref ref-type="bibr" rid="ref50">Sergent et al., 2021</xref>) begins at the end of the temporal window that we interpret as an EEG correlate of subliminal multisensory integration, allowing us to hypothesize that ignition follows a subliminal multisensory integration whose accuracy, while sub-optimal, could represent &#x201C;an early evaluator of sensory coherence&#x201D; (<xref ref-type="bibr" rid="ref9">Ching et al., 2019</xref>).</p>
<p>Finally, while the topographical distribution of components showing no significant differences included primary sensory areas (e.g., <xref rid="SM1" ref-type="supplementary-material">Supplementary Figures S3A, S4A</xref>), the source estimated for the parieto-temporal component showing significant differences (<xref ref-type="fig" rid="fig3">Figure 3B</xref>) indicated the involvement of associative visual areas (i.e., Brodmann areas 37 and 19) (<xref ref-type="bibr" rid="ref16">Driver and Noesselt, 2008</xref>; <xref ref-type="bibr" rid="ref24">Hidaka et al., 2015</xref>)&#x2014;coherently with our interpretation of differences in this component as a correlate of multisensory integration. Our findings provide supportive evidence for the hypothesis that visual and tactile stimuli undergo multimodal integration, even when presented subliminally. However, we acknowledge that further analysis is needed to decisively determine whether the responses to bimodal stimuli are distinctly different from the mere additive effects of unimodal stimuli.</p>
</sec>
<sec sec-type="conclusions" id="sec9">
<title>Conclusion</title>
<p>The present study introduces a novel paradigm to investigate both behavioral and neuroimaging correlates of the integration of bimodal stimuli that are both subliminal, thus testing a postulate of integration theories of consciousness (<xref ref-type="bibr" rid="ref49">Scott et al., 2018</xref>) and filling a noteworthy gap in the scientific literature&#x2014;so far reporting only behavioral correlates of multimodal subliminal integration or the integration of a subliminal stimulus with a supraliminal one (<xref ref-type="bibr" rid="ref60">Zher-Wen and Yu, 2023</xref>).</p>
<p>The relatively small sample involved&#x2014;although at least double that of Ernst and Banks&#x2019; seminal paper (<xref ref-type="bibr" rid="ref17">Ernst and Banks, 2002</xref>)&#x2014;implies caution in generalizing the present data. Nevertheless, with respect to the criticisms typically raised in the research lines involving subliminal stimulation (<xref ref-type="bibr" rid="ref59">Wiens, 2007</xref>; <xref ref-type="bibr" rid="ref3">Baroni et al., 2021</xref>; <xref ref-type="bibr" rid="ref22">Frumento et al., 2021</xref>, <xref ref-type="bibr" rid="ref21">2022</xref>; <xref ref-type="bibr" rid="ref7">Cesari et al., 2023</xref>) and multisensory subliminal integration (<xref ref-type="bibr" rid="ref34">Mudrik et al., 2014</xref>), the methodological robustness of the present study was guaranteed by (1) the trial-by-trial assessment of stimulus detection, (2) the fine calibration measured (and furtherly checked) for each sensory mode, for each participant, before each experiment, (3) the exclusion of incongruent stimuli from analysis, and (4) the subliminal administration of stimuli coming from different sensory modes (tactile and visual).</p>
<p>The results show significant differences between bimodal and unimodal stimuli in both behavioral and neuroimaging correlates. These lines of evidence support each other in suggesting that conscious awareness is not needed to integrate stimuli coming from different sensory modes. While there is no universally agreed definition of what multisensory integration is, the reported evidence fits with authoritative definitions (<xref ref-type="bibr" rid="ref53">Stein et al., 2014</xref>). In this regard, it is worth noting that each of the reported results, taken individually, might not be considered a definitive proof of subliminal multimodal integration: as an example, the observation that unimodal/bimodal differences are earlier for subliminal than for supraliminal stimuli could be interpretable as a confirmation of the relevance of recurring processes for awareness proposed by some integration theories of consciousness (<xref ref-type="bibr" rid="ref49">Scott et al., 2018</xref>). Similarly, even if the significant differences between the JND and EEG correlates of unimodal and bimodal subliminal stimuli contrast some IIT postulates [e.g., &#x201C;consciousness requires both integration and differentiation&#x201D;; &#x201C;high-level cognitive performance such as judging whether a scene is congruous or incongruous [&#x2026;] lack integration and therefore are strictly unconscious (<xref ref-type="bibr" rid="ref55">Tononi et al., 2016</xref>)&#x201D;], the comparisons between subliminal and supraliminal stimuli (<xref rid="SM1" ref-type="supplementary-material">Supplementary Figures S3, S5&#x2013;S8</xref>) replicate those expected by IIT (<xref ref-type="bibr" rid="ref36">Nierhaus et al., 2015</xref>).</p>
<p>However, the convergence of behavioral and neuroimaging correlates of subliminal stimulations and their coherency with the correlates of supraliminal integration can reasonably be interpreted, as a whole, as convincing evidence that subliminal multimodal integration is possible. Indeed, our brain can not only integrate multimodal stimuli we are not aware of, but it can also trick ourselves into believing to be randomly guessing in a cognitive task (e.g., discriminating just-noticeable differences between subliminal stimuli) while in fact our accuracy is significantly higher than chance. Is our consciousness just a passive spectator who deludes himself about being relevant for higher-order functions? To answer this question, it is worth looking at the other side of the coin, i.e., comparing subliminal stimuli with their supraliminal counterparts. In fact, supraliminal multimodal integration showed qualitatively different correlates with respect to those of subliminal multimodal integration. In particular, the statistical models describing the integration of bimodal trials in the JND session differed depending on their stimulation being supraliminal or subliminal: the former followed the model of maximum-likelihood estimation [replicating the seminal experiment by <xref ref-type="bibr" rid="ref17">Ernst and Banks (2002)</xref>], while the latter did not. However, when the stimuli were subliminal, a form of integration still occurred. Indeed, we observed a higher-than-chance accuracy only for bimodal trials, which was not as accurate as for supraliminal trials&#x2014;similar to what was demonstrated in children, who are nevertheless thought to integrate multisensory stimuli (<xref ref-type="bibr" rid="ref23">Gori et al., 2008</xref>; <xref ref-type="bibr" rid="ref35">Negen et al., 2019</xref>). 
This form of integration, if confirmed also in modality-independent integrative regions (<xref ref-type="bibr" rid="ref52">Setti et al., 2023</xref>) and/or in superior colliculus (<xref ref-type="bibr" rid="ref54">Tamietto et al., 2010</xref>), could underlie phenomena such as obstacle avoidance in blindsight (<xref ref-type="bibr" rid="ref12">de Gelder et al., 2008</xref>). At least in the context of this specific task, the role of consciousness resembles that of an optimal integrator refining an accuracy that already resulted to be significantly higher than chance at an unconscious level. Further studies are needed to test the hypothesis that consciousness is an optimal integrator: in fact, the stimuli administered in the present study differed not only for being supraliminal or subliminal but also for their absolute intensity. To rule out the possibility that optimal integration occurred only for supraliminal stimuli because of their higher absolute intensity, a replication of the JND session is needed administering stimuli the intensity of which falls exactly on the calibrated threshold (rather than slightly below or above it), so that <italic>circa</italic> half of them should result subliminal.</p>
<p>In conclusion, the present study is the first to describe the integration of bimodal stimuli occurring even if they are subliminal, thus opening impactful clinical and theoretical implications. The former could pave the way for the implementation of subthreshold stimulations in rehabilitation neuroprostheses (<xref ref-type="bibr" rid="ref44">Raspopovic, 2020</xref>; <xref ref-type="bibr" rid="ref41">Preatoni et al., 2021</xref>; <xref ref-type="bibr" rid="ref8">Chee et al., 2022</xref>), enhancing their acceptability (<xref ref-type="bibr" rid="ref42">Preatoni et al., 2021</xref>; <xref ref-type="bibr" rid="ref57">Valle et al., 2021</xref>; <xref ref-type="bibr" rid="ref6">Cesari et al., 2024</xref>) while maintaining a comparable efficacy. In this regard, further studies are needed to investigate the integration of a subliminal stimulus with a supraliminal one and to test the clinical applicability of the results [e.g., in clinical populations susceptible to interoceptive specificities (<xref ref-type="bibr" rid="ref1">Alf&#x00EC; et al., 2023</xref>; <xref ref-type="bibr" rid="ref10">Cipriani et al., 2024</xref>)].</p>
<p>For what concerns the theoretical implications, the significant differences between unimodal and bimodal subliminal stimuli observed in both the JND and the EEG sessions converge to suggest that multimodal integration is related to stimulus awareness but not &#x201C;needed&#x201D; (<xref ref-type="bibr" rid="ref2">Baars, 2002</xref>) for its occurrence: however, this evidence&#x2014;while contradicting an assumption shared by the so-called integration theories of consciousness (<xref ref-type="bibr" rid="ref49">Scott et al., 2018</xref>)&#x2014;does not represent a disconfirmation of each of these theories as a whole (on the contrary, as detailed previously in this section, it confirms other points). It is also worth noting that these results are currently based on an inevitably limited amount of data and should thus be interpreted with caution until they are corroborated by future research able to put into practice the improvements proposed in the next section.</p>
<p>The whole debate about consciousness was centered for decades on the idea that stimulus awareness and integration are necessarily interdependent (<xref ref-type="bibr" rid="ref34">Mudrik et al., 2014</xref>): the theories based on this assumption should evolve to fit with the evidence coming from the present study.</p>
</sec>
<sec id="sec10">
<title>Limitations of the study</title>
<p>Some noteworthy methodological issues are known to affect the scientific literature concerning subliminal stimuli and the interpretation of their behavioral or neuroimaging correlates (<xref ref-type="bibr" rid="ref59">Wiens, 2007</xref>; <xref ref-type="bibr" rid="ref3">Baroni et al., 2021</xref>; <xref ref-type="bibr" rid="ref22">Frumento et al., 2021</xref>, <xref ref-type="bibr" rid="ref21">2022</xref>; <xref ref-type="bibr" rid="ref60">Zher-Wen and Yu, 2023</xref>).</p>
<p>The most relevant problem typically concerns the meaning attributed to the term &#x201C;subliminal,&#x201D; and the reliability of methods used to label stimuli as such (<xref ref-type="bibr" rid="ref59">Wiens, 2007</xref>; <xref ref-type="bibr" rid="ref60">Zher-Wen and Yu, 2023</xref>). In fact, the mere calibration of a perceptual threshold is not sufficient to guarantee that all stimuli below this threshold will be not consciously perceived (nor that all stimuli above this threshold will be consciously perceived) (<xref ref-type="bibr" rid="ref21">Frumento et al., 2022</xref>): in addition, the intensity calibrated for tactile stimuli and for visual stimuli to result subliminal could sum up and induce a conscious experience of the bimodal stimulus.</p>
<p>The most reliable method to assess stimulus detection consists in a trial-by-trial report (not necessarily verbal) of its awareness (<xref ref-type="bibr" rid="ref58">Wiens, 2006</xref>), but this procedure implies decisional and motor processes the correlates of which could be misinterpreted as a clue of multisensory integration [the reason why no-report paradigms are preferable in studies primarily aimed at comparing neural correlates of conscious and non-conscious stimuli (<xref ref-type="bibr" rid="ref28">Kapoor et al., 2022</xref>)]. However, the main aim of the present study was to compare possible differences in behavioral and neuroimaging correlates of bimodal or unimodal stimuli that are subliminal, i.e., that do not imply any decisional or motor process to be labeled as subliminal (indeed, the lack of a report is the proof of their subliminality): the trial-by-trial assessment of stimulus detection allowed us to exclude incongruent stimuli from analysis (see <xref ref-type="fig" rid="fig1">Figure 1D</xref>), thus ruling out the possibility that supposed-to-be-subliminal bimodal stimuli were in fact consciously perceived because of a possible summation effect of the unimodal subliminal thresholds.</p>
<p>While adopting a robust methodology to calibrate stimuli intensity and to assess their detection, this procedure is necessarily based on subjective reports and can thus be affected by participant&#x2019;s psychological variables (e.g., interpretation of instructions, level of attention, and compliance with the experimenter). Nevertheless, subjective reports represent the best assessment technique for experiments centered on the administration of subliminal stimuli (<xref ref-type="bibr" rid="ref59">Wiens, 2007</xref>). To counterbalance the possible issues inevitably coming with this procedure, many measures were taken: intensity calibration needed to pass a rigorous check of its efficacy before each experiment; participants were instructed to mark stimuli as perceived when their confidence in the response was above chance, thus inducing the adoption of conservative criteria; stimulus detection was assessed on a trial-by-trial basis, following methodological indications coming from the scientific literature concerning subliminal stimulation (<xref ref-type="bibr" rid="ref3">Baroni et al., 2021</xref>; <xref ref-type="bibr" rid="ref22">Frumento et al., 2021</xref>, <xref ref-type="bibr" rid="ref21">2022</xref>); a break allowed participants to restore their attention level and to maintain it constant during the whole experiment. Furthermore, we acknowledge that the sample size in our study, while yielding effect sizes indicative of strong effects, is relatively modest. 
Indeed, even though our sample size is placed within the generally employed standard in the field (<xref ref-type="bibr" rid="ref17">Ernst and Banks, 2002</xref>; <xref ref-type="bibr" rid="ref29">Laurino et al., 2014</xref>; <xref ref-type="bibr" rid="ref11">Dadarlat et al., 2015</xref>; <xref ref-type="bibr" rid="ref31">Marasco et al., 2018</xref>; <xref ref-type="bibr" rid="ref46">Risso et al., 2019</xref>, <xref ref-type="bibr" rid="ref45">2022</xref>), we understand that this limits the extent to which our findings can be generalized. Future studies with larger and more diverse populations are warranted to replicate and potentially expand upon our results, ensuring robustness and wider applicability within the field of subliminal stimulation research.</p>
</sec>
<sec sec-type="data-availability" id="sec11">
<title>Data availability statement</title>
<p>The datasets presented in this study can be found in online repositories. The names of the repository/repositories and accession number(s) can be found at: <ext-link xlink:href="https://osf.io/5wsnk/" ext-link-type="uri">https://osf.io/5wsnk/</ext-link>.</p>
</sec>
<sec sec-type="ethics-statement" id="sec12">
<title>Ethics statement</title>
<p>The studies involving humans were approved by Eidgen&#x00F6;ssische Technische Hochschule Z&#x00FC;rich (EK 2019-N-97). The studies were conducted in accordance with the local legislation and institutional requirements. The participants provided their written informed consent to participate in this study.</p>
</sec>
<sec sec-type="author-contributions" id="sec13">
<title>Author contributions</title>
<p>SF: Writing &#x2013; original draft, Visualization, Methodology, Investigation, Formal analysis, Data curation, Conceptualization. GP: Writing &#x2013; original draft, Visualization, Methodology, Investigation, Formal analysis, Data curation, Conceptualization. LC: Writing &#x2013; review &#x0026; editing, Validation, Software, Conceptualization. AG: Writing &#x2013; review &#x0026; editing, Supervision, Resources, Funding acquisition. FC: Writing &#x2013; review &#x0026; editing, Visualization, Formal analysis, Data curation. DM: Writing &#x2013; review &#x0026; editing, Visualization, Validation, Supervision, Methodology, Formal analysis, Data curation, Conceptualization. SR: Writing &#x2013; review &#x0026; editing, Supervision, Resources, Project administration, Methodology, Funding acquisition, Conceptualization.</p>
</sec>
</body>
<back>
<sec sec-type="funding-information" id="sec14">
<title>Funding</title>
<p>The author(s) declare that financial support was received for the research, authorship, and/or publication of this article. The salary of SF was provided by the Italian Ministry of University and Research at the time of the study. This study was supported by MUR (Italian Ministry of University and Research), the University of Pisa, and the Swiss National Science Foundation (SNSF) (MOVEIT 197271).</p>
</sec>
<sec sec-type="COI-statement" id="sec15">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="sec16">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec sec-type="supplementary-material" id="sec17">
<title>Supplementary material</title>
<p>The Supplementary material for this article can be found online at: <ext-link xlink:href="https://www.frontiersin.org/articles/10.3389/fpsyg.2024.1396946/full#supplementary-material" ext-link-type="uri">https://www.frontiersin.org/articles/10.3389/fpsyg.2024.1396946/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Data_Sheet_1.docx" id="SM1" mimetype="application/vnd.openxmlformats-officedocument.wordprocessingml.document" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<ref-list>
<title>References</title>
<ref id="ref1">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Alf&#x00EC;</surname> <given-names>G.</given-names></name> <name><surname>Orr&#x00F9;</surname> <given-names>G.</given-names></name> <name><surname>Menicucci</surname> <given-names>D.</given-names></name> <name><surname>Miccoli</surname> <given-names>M.</given-names></name> <name><surname>Casigliani</surname> <given-names>V.</given-names></name> <name><surname>Totaro</surname> <given-names>M.</given-names></name> <etal/></person-group>. (<year>2023</year>). <article-title>A machine learning approach unveils the relationships between sickness behavior and interoception after vaccination: suggestions for psychometric indices of higher vulnerability</article-title>. <source>Healthcare</source> <volume>11</volume>:<fpage>2981</fpage>. doi: <pub-id pub-id-type="doi">10.3390/healthcare11222981</pub-id>, PMID: <pub-id pub-id-type="pmid">37998473</pub-id></citation>
</ref>
<ref id="ref2">
<citation citation-type="journal"><person-group person-group-type="author">
<name><surname>Baars</surname> <given-names>B. J.</given-names></name>
</person-group> (<year>2002</year>). <article-title>The conscious access hypothesis: origins and recent evidence</article-title>. <source>Trends Cogn. Sci.</source> <volume>6</volume>, <fpage>47</fpage>&#x2013;<lpage>52</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S1364-6613(00)01819-2</pub-id>, PMID: <pub-id pub-id-type="pmid">11849615</pub-id></citation>
</ref>
<ref id="ref3">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Baroni</surname> <given-names>M.</given-names></name> <name><surname>Frumento</surname> <given-names>S.</given-names></name> <name><surname>Cesari</surname> <given-names>V.</given-names></name> <name><surname>Gemignani</surname> <given-names>A.</given-names></name> <name><surname>Menicucci</surname> <given-names>D.</given-names></name> <name><surname>Rutigliano</surname> <given-names>G.</given-names></name></person-group> (<year>2021</year>). <article-title>Unconscious processing of subliminal stimuli in panic disorder: a systematic review and meta-analysis</article-title>. <source>Neurosci. Biobehav. Rev.</source> <volume>128</volume>, <fpage>136</fpage>&#x2013;<lpage>151</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neubiorev.2021.06.023</pub-id>, PMID: <pub-id pub-id-type="pmid">34139247</pub-id></citation>
</ref>
<ref id="ref4">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bell</surname> <given-names>A. J.</given-names></name> <name><surname>Sejnowski</surname> <given-names>T. J.</given-names></name></person-group> (<year>1995</year>). <article-title>An information-maximization approach to blind separation and blind deconvolution</article-title>. <source>Neural Comput.</source> <volume>7</volume>, <fpage>1129</fpage>&#x2013;<lpage>1159</lpage>. doi: <pub-id pub-id-type="doi">10.1162/neco.1995.7.6.1129</pub-id>, PMID: <pub-id pub-id-type="pmid">7584893</pub-id></citation>
</ref>
<ref id="ref5">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bocharov</surname> <given-names>A. V.</given-names></name> <name><surname>Savostyanov</surname> <given-names>A. N.</given-names></name> <name><surname>Tamozhnikov</surname> <given-names>S. S.</given-names></name> <name><surname>Merkulova</surname> <given-names>E. A.</given-names></name> <name><surname>Saprigyn</surname> <given-names>A. E.</given-names></name> <name><surname>Proshina</surname> <given-names>E. A.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>Oscillatory dynamics of perception of emotional sentences in healthy subjects with different severity of depressive symptoms</article-title>. <source>Neurosci. Lett.</source> <volume>728</volume>:<fpage>134888</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neulet.2020.134888</pub-id>, PMID: <pub-id pub-id-type="pmid">32151710</pub-id></citation>
</ref>
<ref id="ref6">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cesari</surname> <given-names>V.</given-names></name> <name><surname>D&#x2019;Aversa</surname> <given-names>S.</given-names></name> <name><surname>Piarulli</surname> <given-names>A.</given-names></name> <name><surname>Melfi</surname> <given-names>F.</given-names></name> <name><surname>Gemignani</surname> <given-names>A.</given-names></name> <name><surname>Menicucci</surname> <given-names>D.</given-names></name></person-group> (<year>2024</year>). <article-title>Sense of agency and skills learning in virtual-mediated environment: a systematic review</article-title>. <source>Brain Sci.</source> <volume>14</volume>:<fpage>350</fpage>. doi: <pub-id pub-id-type="doi">10.3390/brainsci14040350</pub-id>, PMID: <pub-id pub-id-type="pmid">38672002</pub-id></citation>
</ref>
<ref id="ref7">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cesari</surname> <given-names>V.</given-names></name> <name><surname>Frumento</surname> <given-names>S.</given-names></name> <name><surname>Leo</surname> <given-names>A.</given-names></name> <name><surname>Baroni</surname> <given-names>M.</given-names></name> <name><surname>Rutigliano</surname> <given-names>G.</given-names></name> <name><surname>Gemignani</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2023</year>). <article-title>Functional correlates of subliminal stimulation in posttraumatic stress disorder: systematic review and meta-analysis</article-title>. <source>J. Affect. Disord.</source> <volume>337</volume>, <fpage>175</fpage>&#x2013;<lpage>185</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jad.2023.05.047</pub-id>, PMID: <pub-id pub-id-type="pmid">37236272</pub-id></citation>
</ref>
<ref id="ref8">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chee</surname> <given-names>L.</given-names></name> <name><surname>Valle</surname> <given-names>G.</given-names></name> <name><surname>Marazzi</surname> <given-names>M.</given-names></name> <name><surname>Preatoni</surname> <given-names>G.</given-names></name> <name><surname>Haufe</surname> <given-names>F. L.</given-names></name> <name><surname>Xiloyannis</surname> <given-names>M.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>Optimally-calibrated non-invasive feedback improves amputees&#x2019; metabolic consumption, balance and walking confidence</article-title>. <source>J Neural Eng.</source> <volume>19</volume>:<fpage>046049</fpage>. doi: <pub-id pub-id-type="doi">10.1088/1741-2552/ac883b</pub-id>, PMID: <pub-id pub-id-type="pmid">35944515</pub-id></citation>
</ref>
<ref id="ref9">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ching</surname> <given-names>A. S. M.</given-names></name> <name><surname>Kim</surname> <given-names>J.</given-names></name> <name><surname>Davis</surname> <given-names>C.</given-names></name></person-group> (<year>2019</year>). <article-title>Auditory&#x2013;visual integration during non-conscious perception</article-title>. <source>Cortex</source> <volume>117</volume>, <fpage>1</fpage>&#x2013;<lpage>15</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cortex.2019.02.014</pub-id>, PMID: <pub-id pub-id-type="pmid">30925308</pub-id></citation>
</ref>
<ref id="ref10">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cipriani</surname> <given-names>E.</given-names></name> <name><surname>Frumento</surname> <given-names>S.</given-names></name> <name><surname>Grassini</surname> <given-names>S.</given-names></name> <name><surname>Gemignani</surname> <given-names>A.</given-names></name> <name><surname>Menicucci</surname> <given-names>D.</given-names></name></person-group> (<year>2024</year>). <article-title>Do individual differences in perception affect awareness of climate change?</article-title> <source>Brain Sci.</source> <volume>14</volume>:<fpage>266</fpage>. doi: <pub-id pub-id-type="doi">10.3390/brainsci14030266</pub-id>, PMID: <pub-id pub-id-type="pmid">38539654</pub-id></citation>
</ref>
<ref id="ref11">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dadarlat</surname> <given-names>M. C.</given-names></name> <name><surname>O&#x2019;Doherty</surname> <given-names>J. E.</given-names></name> <name><surname>Sabes</surname> <given-names>P. N.</given-names></name></person-group> (<year>2015</year>). <article-title>A learning-based approach to artificial sensory feedback leads to optimal integration</article-title>. <source>Nat. Neurosci.</source> <volume>18</volume>, <fpage>138</fpage>&#x2013;<lpage>144</lpage>. doi: <pub-id pub-id-type="doi">10.1038/nn.3883</pub-id>, PMID: <pub-id pub-id-type="pmid">25420067</pub-id></citation>
</ref>
<ref id="ref12">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>de Gelder</surname> <given-names>B.</given-names></name> <name><surname>Tamietto</surname> <given-names>M.</given-names></name> <name><surname>van Boxtel</surname> <given-names>G.</given-names></name> <name><surname>Goebel</surname> <given-names>R.</given-names></name> <name><surname>Sahraie</surname> <given-names>A.</given-names></name> <name><surname>van den Stock</surname> <given-names>J.</given-names></name> <etal/></person-group>. (<year>2008</year>). <article-title>Intact navigation skills after bilateral loss of striate cortex</article-title>. <source>Curr. Biol.</source> <volume>18</volume>, <fpage>R1128</fpage>&#x2013;<lpage>R1129</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cub.2008.11.002</pub-id>, PMID: <pub-id pub-id-type="pmid">19108766</pub-id></citation>
</ref>
<ref id="ref13">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dehaene</surname> <given-names>S.</given-names></name> <name><surname>Changeux</surname> <given-names>J. P.</given-names></name> <name><surname>Naccache</surname> <given-names>L.</given-names></name> <name><surname>Sackur</surname> <given-names>J.</given-names></name> <name><surname>Sergent</surname> <given-names>C.</given-names></name></person-group> (<year>2006</year>). <article-title>Conscious, preconscious, and subliminal processing: a testable taxonomy</article-title>. <source>Trends Cogn. Sci.</source> <volume>10</volume>, <fpage>204</fpage>&#x2013;<lpage>211</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2006.03.007</pub-id>, PMID: <pub-id pub-id-type="pmid">16603406</pub-id></citation>
</ref>
<ref id="ref14">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dehaene</surname> <given-names>S.</given-names></name> <name><surname>Sergent</surname> <given-names>C.</given-names></name> <name><surname>Changeux</surname> <given-names>J. P.</given-names></name></person-group> (<year>2003</year>). <article-title>A neuronal network model linking subjective reports and objective physiological data during conscious perception</article-title>. <source>PNAS</source> <volume>100</volume>, <fpage>8520</fpage>&#x2013;<lpage>8525</lpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.1332574100</pub-id>, PMID: <pub-id pub-id-type="pmid">12829797</pub-id></citation>
</ref>
<ref id="ref15">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Deroy</surname> <given-names>O.</given-names></name> <name><surname>Chen</surname> <given-names>Y. C.</given-names></name> <name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2014</year>). <article-title>Multisensory constraints on awareness</article-title>. <source>Philos. Trans. R. Soc. B Biol. Sci.</source> <volume>369</volume>:<fpage>20130207</fpage>. doi: <pub-id pub-id-type="doi">10.1098/rstb.2013.0207</pub-id>, PMID: <pub-id pub-id-type="pmid">24639579</pub-id></citation>
</ref>
<ref id="ref16">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Driver</surname> <given-names>J.</given-names></name> <name><surname>Noesselt</surname> <given-names>T.</given-names></name></person-group> (<year>2008</year>). <article-title>Multisensory interplay reveals Crossmodal influences on &#x2018;sensory-specific&#x2019; brain regions, neural responses, and judgments</article-title>. <source>Neuron</source> <volume>57</volume>, <fpage>11</fpage>&#x2013;<lpage>23</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuron.2007.12.013</pub-id>, PMID: <pub-id pub-id-type="pmid">18184561</pub-id></citation>
</ref>
<ref id="ref17">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ernst</surname> <given-names>M. O.</given-names></name> <name><surname>Banks</surname> <given-names>M. S.</given-names></name></person-group> (<year>2002</year>). <article-title>Humans integrate visual and haptic information in a statistically optimal fashion</article-title>. <source>Nature</source> <volume>415</volume>, <fpage>429</fpage>&#x2013;<lpage>433</lpage>. doi: <pub-id pub-id-type="doi">10.1038/415429a</pub-id>, PMID: <pub-id pub-id-type="pmid">11807554</pub-id></citation>
</ref>
<ref id="ref18">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Faivre</surname> <given-names>N.</given-names></name> <name><surname>Arzi</surname> <given-names>A.</given-names></name> <name><surname>Lunghi</surname> <given-names>C.</given-names></name> <name><surname>Salomon</surname> <given-names>R.</given-names></name></person-group> (<year>2017</year>). <article-title>Consciousness is more than meets the eye: a call for a multisensory study of subjective experience</article-title>. <source>Neurosci. Conscious.</source> <volume>2017</volume>:<fpage>nix003</fpage>. doi: <pub-id pub-id-type="doi">10.1093/nc/nix003</pub-id>, PMID: <pub-id pub-id-type="pmid">30042838</pub-id></citation>
</ref>
<ref id="ref19">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Faivre</surname> <given-names>N.</given-names></name> <name><surname>Mudrik</surname> <given-names>L.</given-names></name> <name><surname>Schwartz</surname> <given-names>N.</given-names></name> <name><surname>Koch</surname> <given-names>C.</given-names></name></person-group> (<year>2014</year>). <article-title>Multisensory integration in complete unawareness: evidence from audiovisual congruency priming</article-title>. <source>Psychol. Sci.</source> <volume>25</volume>, <fpage>2006</fpage>&#x2013;<lpage>2016</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0956797614547916</pub-id>, PMID: <pub-id pub-id-type="pmid">25269620</pub-id></citation>
</ref>
<ref id="ref20">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fossataro</surname> <given-names>C.</given-names></name> <name><surname>Galigani</surname> <given-names>M.</given-names></name> <name><surname>Rossi Sebastiano</surname> <given-names>A.</given-names></name> <name><surname>Bruno</surname> <given-names>V.</given-names></name> <name><surname>Ronga</surname> <given-names>I.</given-names></name> <name><surname>Garbarini</surname> <given-names>F.</given-names></name></person-group> (<year>2023</year>). <article-title>Spatial proximity to others induces plastic changes in the neural representation of the peripersonal space</article-title>. <source>iScience.</source> <volume>26</volume>:<fpage>105879</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.isci.2022.105879</pub-id>, PMID: <pub-id pub-id-type="pmid">36654859</pub-id></citation>
</ref>
<ref id="ref21">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Frumento</surname> <given-names>S.</given-names></name> <name><surname>Gemignani</surname> <given-names>A.</given-names></name> <name><surname>Menicucci</surname> <given-names>D.</given-names></name></person-group> (<year>2022</year>). <article-title>Perceptually visible but emotionally subliminal stimuli to improve exposure therapies</article-title>. <source>Brain Sci.</source> <volume>12</volume>:<fpage>867</fpage>. doi: <pub-id pub-id-type="doi">10.3390/brainsci12070867</pub-id>, PMID: <pub-id pub-id-type="pmid">35884675</pub-id></citation>
</ref>
<ref id="ref22">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Frumento</surname> <given-names>S.</given-names></name> <name><surname>Menicucci</surname> <given-names>D.</given-names></name> <name><surname>Hitchcott</surname> <given-names>P. K.</given-names></name> <name><surname>Zaccaro</surname> <given-names>A.</given-names></name> <name><surname>Gemignani</surname> <given-names>A.</given-names></name></person-group> (<year>2021</year>). <article-title>Systematic review of studies on subliminal exposure to phobic stimuli: integrating therapeutic models for specific phobias</article-title>. <source>Front. Neurosci.</source> <volume>15</volume>:<fpage>571</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnins.2021.654170</pub-id>, PMID: <pub-id pub-id-type="pmid">34149346</pub-id></citation>
</ref>
<ref id="ref23">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gori</surname> <given-names>M.</given-names></name> <name><surname>Del Viva</surname> <given-names>M.</given-names></name> <name><surname>Sandini</surname> <given-names>G.</given-names></name> <name><surname>Burr</surname> <given-names>D. C.</given-names></name></person-group> (<year>2008</year>). <article-title>Young children do not integrate visual and haptic form information</article-title>. <source>Curr. Biol.</source> <volume>18</volume>, <fpage>694</fpage>&#x2013;<lpage>698</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cub.2008.04.036</pub-id>, PMID: <pub-id pub-id-type="pmid">18450446</pub-id></citation>
</ref>
<ref id="ref24">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hidaka</surname> <given-names>S.</given-names></name> <name><surname>Teramoto</surname> <given-names>W.</given-names></name> <name><surname>Sugita</surname> <given-names>Y.</given-names></name></person-group> (<year>2015</year>). <article-title>Spatiotemporal processing in Crossmodal interactions for perception of the external world: a review</article-title>. <source>Front. Integr. Neurosci.</source> <volume>9</volume>:<fpage>9</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnint.2015.00062</pub-id>, PMID: <pub-id pub-id-type="pmid">26733827</pub-id></citation>
</ref>
<ref id="ref25">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Himberg</surname> <given-names>J.</given-names></name> <name><surname>Hyv&#x00E4;rinen</surname> <given-names>A.</given-names></name> <name><surname>Esposito</surname> <given-names>F.</given-names></name></person-group> (<year>2004</year>). <article-title>Validating the independent components of neuroimaging time series via clustering and visualization</article-title>. <source>NeuroImage</source> <volume>22</volume>, <fpage>1214</fpage>&#x2013;<lpage>1222</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2004.03.027</pub-id>, PMID: <pub-id pub-id-type="pmid">15219593</pub-id></citation>
</ref>
<ref id="ref26">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jung</surname> <given-names>T. P.</given-names></name> <name><surname>Makeig</surname> <given-names>S.</given-names></name> <name><surname>McKeown</surname> <given-names>M. J.</given-names></name> <name><surname>Bell</surname> <given-names>A. J.</given-names></name> <name><surname>Lee</surname> <given-names>T. W.</given-names></name> <name><surname>Sejnowski</surname> <given-names>T. J.</given-names></name></person-group> (<year>2001</year>). <article-title>Imaging brain dynamics using independent component analysis</article-title>. <source>Proc. IEEE</source> <volume>89</volume>, <fpage>1107</fpage>&#x2013;<lpage>1122</lpage>. doi: <pub-id pub-id-type="doi">10.1109/5.939827</pub-id>, PMID: <pub-id pub-id-type="pmid">20824156</pub-id></citation>
</ref>
<ref id="ref27">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Junghofer</surname> <given-names>M.</given-names></name> <name><surname>Elbert</surname> <given-names>T.</given-names></name> <name><surname>Tucker</surname> <given-names>D. M.</given-names></name> <name><surname>Rockstroh</surname> <given-names>B.</given-names></name></person-group> (<year>2000</year>). <article-title>Statistical control of artifacts in dense array EEG/MEG studies</article-title>. <source>Psychophysiology</source> <volume>37</volume>, <fpage>523</fpage>&#x2013;<lpage>532</lpage>. doi: <pub-id pub-id-type="doi">10.1111/1469-8986.3740523</pub-id>, PMID: <pub-id pub-id-type="pmid">10934911</pub-id></citation>
</ref>
<ref id="ref28">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kapoor</surname> <given-names>V.</given-names></name> <name><surname>Dwarakanath</surname> <given-names>A.</given-names></name> <name><surname>Safavi</surname> <given-names>S.</given-names></name> <name><surname>Werner</surname> <given-names>J.</given-names></name> <name><surname>Besserve</surname> <given-names>M.</given-names></name> <name><surname>Panagiotaropoulos</surname> <given-names>T. I.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>Decoding internally generated transitions of conscious contents in the prefrontal cortex without subjective reports</article-title>. <source>Nat. Commun.</source> <volume>13</volume>:<fpage>1535</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41467-022-28897-2</pub-id>, PMID: <pub-id pub-id-type="pmid">35318323</pub-id></citation>
</ref>
<ref id="ref29">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Laurino</surname> <given-names>M.</given-names></name> <name><surname>Menicucci</surname> <given-names>D.</given-names></name> <name><surname>Piarulli</surname> <given-names>A.</given-names></name> <name><surname>Mastorci</surname> <given-names>F.</given-names></name> <name><surname>Bedini</surname> <given-names>R.</given-names></name> <name><surname>Allegrini</surname> <given-names>P.</given-names></name> <etal/></person-group>. (<year>2014</year>). <article-title>Disentangling different functional roles of evoked K-complex components: mapping the sleeping brain while quenching sensory processing</article-title>. <source>NeuroImage</source> <volume>86</volume>, <fpage>433</fpage>&#x2013;<lpage>445</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2013.10.030</pub-id>, PMID: <pub-id pub-id-type="pmid">24513527</pub-id></citation>
</ref>
<ref id="ref30">
<citation citation-type="other"><person-group person-group-type="author"><name><surname>Makeig</surname> <given-names>S.</given-names></name> <name><surname>Bell</surname> <given-names>A.</given-names></name> <name><surname>Jung</surname> <given-names>T. P.</given-names></name> <name><surname>Sejnowski</surname> <given-names>T. J</given-names></name></person-group>. (<year>1996</year>). &#x201C;<article-title>Advances in neural information processing systems</article-title>&#x201D; in <source>Independent component analysis of electroencephalographic data</source>. <publisher-loc>Cambridge, MA</publisher-loc>: <publisher-name>MIT press</publisher-name>, vol. <volume>8</volume>, <fpage>145</fpage>&#x2013;<lpage>151</lpage>.</citation>
</ref>
<ref id="ref31">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Marasco</surname> <given-names>P. D.</given-names></name> <name><surname>Hebert</surname> <given-names>J. S.</given-names></name> <name><surname>Sensinger</surname> <given-names>J. W.</given-names></name> <name><surname>Shell</surname> <given-names>C. E.</given-names></name> <name><surname>Schofield</surname> <given-names>J. S.</given-names></name> <name><surname>Thumser</surname> <given-names>Z. C.</given-names></name> <etal/></person-group>. (<year>2018</year>). <article-title>Illusory movement perception improves motor control for prosthetic hands</article-title>. <source>Sci. Transl. Med.</source> <volume>10</volume>:<fpage>eaao6990</fpage>. doi: <pub-id pub-id-type="doi">10.1126/scitranslmed.aao6990</pub-id>, PMID: <pub-id pub-id-type="pmid">29540617</pub-id></citation>
</ref>
<ref id="ref32">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mcgurk</surname> <given-names>H.</given-names></name> <name><surname>Macdonald</surname> <given-names>J.</given-names></name></person-group> (<year>1976</year>). <article-title>Hearing lips and seeing voices</article-title>. <source>Nature</source> <volume>264</volume>, <fpage>746</fpage>&#x2013;<lpage>748</lpage>. doi: <pub-id pub-id-type="doi">10.1038/264746a0</pub-id>, PMID: <pub-id pub-id-type="pmid">1012311</pub-id></citation>
</ref>
<ref id="ref33">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Menicucci</surname> <given-names>D.</given-names></name> <name><surname>Artoni</surname> <given-names>F.</given-names></name> <name><surname>Bedini</surname> <given-names>R.</given-names></name> <name><surname>Pingitore</surname> <given-names>A.</given-names></name> <name><surname>Passera</surname> <given-names>M.</given-names></name> <name><surname>Landi</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2014</year>). <article-title>Brain responses to emotional stimuli during breath holding and hypoxia: an approach based on the independent component analysis</article-title>. <source>Brain Topogr.</source> <volume>27</volume>, <fpage>771</fpage>&#x2013;<lpage>785</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s10548-013-0349-z</pub-id>, PMID: <pub-id pub-id-type="pmid">24375284</pub-id></citation>
</ref>
<ref id="ref34">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mudrik</surname> <given-names>L.</given-names></name> <name><surname>Faivre</surname> <given-names>N.</given-names></name> <name><surname>Koch</surname> <given-names>C.</given-names></name></person-group> (<year>2014</year>). <article-title>Information integration without awareness</article-title>. <source>Trends Cogn. Sci.</source> <volume>18</volume>, <fpage>488</fpage>&#x2013;<lpage>496</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2014.04.009</pub-id></citation>
</ref>
<ref id="ref35">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Negen</surname> <given-names>J.</given-names></name> <name><surname>Chere</surname> <given-names>B.</given-names></name> <name><surname>Bird</surname> <given-names>L. A.</given-names></name> <name><surname>Taylor</surname> <given-names>E.</given-names></name> <name><surname>Roome</surname> <given-names>H. E.</given-names></name> <name><surname>Keenaghan</surname> <given-names>S.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Sensory cue combination in children under 10 years of age</article-title>. <source>Cognition</source> <volume>193</volume>:<fpage>104014</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cognition.2019.104014</pub-id>, PMID: <pub-id pub-id-type="pmid">31302529</pub-id></citation>
</ref>
<ref id="ref36">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nierhaus</surname> <given-names>T.</given-names></name> <name><surname>Forschack</surname> <given-names>N.</given-names></name> <name><surname>Piper</surname> <given-names>S. K.</given-names></name> <name><surname>Holtze</surname> <given-names>S.</given-names></name> <name><surname>Krause</surname> <given-names>T.</given-names></name> <name><surname>Taskin</surname> <given-names>B.</given-names></name> <etal/></person-group>. (<year>2015</year>). <article-title>Imperceptible somatosensory stimulation alters sensorimotor background rhythm and connectivity</article-title>. <source>J. Neurosci.</source> <volume>35</volume>, <fpage>5917</fpage>&#x2013;<lpage>5925</lpage>. doi: <pub-id pub-id-type="doi">10.1523/JNEUROSCI.3806-14.2015</pub-id>, PMID: <pub-id pub-id-type="pmid">25878264</pub-id></citation>
</ref>
<ref id="ref37">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Noel</surname> <given-names>J. P.</given-names></name> <name><surname>Chatelle</surname> <given-names>C.</given-names></name> <name><surname>Perdikis</surname> <given-names>S.</given-names></name> <name><surname>J&#x00F6;hr</surname> <given-names>J.</given-names></name> <name><surname>Lopes da Silva</surname> <given-names>M.</given-names></name> <name><surname>Ryvlin</surname> <given-names>P.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Peri-personal space encoding in patients with disorders of consciousness and cognitive-motor dissociation</article-title>. <source>NeuroImage Clin.</source> <volume>24</volume>:<fpage>101940</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.nicl.2019.101940</pub-id>, PMID: <pub-id pub-id-type="pmid">31357147</pub-id></citation>
</ref>
<ref id="ref38">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pfabigan</surname> <given-names>D. M.</given-names></name> <name><surname>Seidel</surname> <given-names>E. M.</given-names></name> <name><surname>Sladky</surname> <given-names>R.</given-names></name> <name><surname>Hahn</surname> <given-names>A.</given-names></name> <name><surname>Paul</surname> <given-names>K.</given-names></name> <name><surname>Grahl</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2014</year>). <article-title>P300 amplitude variation is related to ventral striatum BOLD response during gain and loss anticipation: an EEG and fMRI experiment</article-title>. <source>NeuroImage</source> <volume>96</volume>, <fpage>12</fpage>&#x2013;<lpage>21</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2014.03.077</pub-id>, PMID: <pub-id pub-id-type="pmid">24718288</pub-id></citation>
</ref>
<ref id="ref39">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Piarulli</surname> <given-names>A.</given-names></name> <name><surname>Menicucci</surname> <given-names>D.</given-names></name> <name><surname>Gemignani</surname> <given-names>A.</given-names></name> <name><surname>Olcese</surname> <given-names>U.</given-names></name> <name><surname>d'Ascanio</surname> <given-names>P.</given-names></name> <name><surname>Pingitore</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2010</year>). <article-title>Likeness-based detection of sleep slow oscillations in normal and altered sleep conditions: application on low-density EEG recordings</article-title>. <source>IEEE Trans. Biomed. Eng.</source> <volume>57</volume>, <fpage>363</fpage>&#x2013;<lpage>372</lpage>. doi: <pub-id pub-id-type="doi">10.1109/TBME.2009.2031983</pub-id>, PMID: <pub-id pub-id-type="pmid">19770081</pub-id></citation>
</ref>
<ref id="ref40">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pion-Tonachini</surname> <given-names>L.</given-names></name> <name><surname>Kreutz-Delgado</surname> <given-names>K.</given-names></name> <name><surname>Makeig</surname> <given-names>S.</given-names></name></person-group> (<year>2019</year>). <article-title>ICLabel: an automated electroencephalographic independent component classifier, dataset, and website</article-title>. <source>NeuroImage</source> <volume>198</volume>, <fpage>181</fpage>&#x2013;<lpage>197</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2019.05.026</pub-id>, PMID: <pub-id pub-id-type="pmid">31103785</pub-id></citation>
</ref>
<ref id="ref41">
<citation citation-type="confproc"><person-group person-group-type="author"><name><surname>Preatoni</surname> <given-names>G.</given-names></name> <name><surname>Bracher</surname> <given-names>N. M.</given-names></name> <name><surname>Raspopovic</surname> <given-names>S.</given-names></name></person-group>. <article-title>Towards a future VR-TENS multimodal platform to treat neuropathic pain</article-title>. In: <conf-name>2021 10th International IEEE/EMBS Conference on Neural Engineering (NER)</conf-name>. <publisher-name>IEEE</publisher-name>; (<year>2021</year>):<fpage>1105</fpage>&#x2013;<lpage>1108</lpage>.</citation>
</ref>
<ref id="ref42">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Preatoni</surname> <given-names>G.</given-names></name> <name><surname>Valle</surname> <given-names>G.</given-names></name> <name><surname>Petrini</surname> <given-names>F. M.</given-names></name> <name><surname>Raspopovic</surname> <given-names>S.</given-names></name></person-group> (<year>2021</year>). <article-title>Lightening the perceived prosthesis weight with neural embodiment promoted by sensory feedback</article-title>. <source>Curr. Biol.</source> <volume>31</volume>, <fpage>1065</fpage>&#x2013;<lpage>1071.e4</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cub.2020.11.069</pub-id>, PMID: <pub-id pub-id-type="pmid">33417885</pub-id></citation>
</ref>
<ref id="ref43">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Race</surname> <given-names>D. S.</given-names></name> <name><surname>Hillis</surname> <given-names>A. E.</given-names></name></person-group> (<year>2015</year>). <article-title>Naming</article-title>. <source>Brain Mapp.</source> <volume>2</volume>, <fpage>671</fpage>&#x2013;<lpage>675</lpage>. doi: <pub-id pub-id-type="doi">10.1016/B978-0-12-397025-1.00066-X</pub-id></citation>
</ref>
<ref id="ref44">
<citation citation-type="journal"><person-group person-group-type="author">
<name><surname>Raspopovic</surname> <given-names>S.</given-names></name>
</person-group> (<year>2020</year>). <article-title>Advancing limb neural prostheses</article-title>. <source>Science</source> <volume>370</volume>, <fpage>290</fpage>&#x2013;<lpage>291</lpage>. doi: <pub-id pub-id-type="doi">10.1126/science.abb1073</pub-id></citation>
</ref>
<ref id="ref45">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Risso</surname> <given-names>G.</given-names></name> <name><surname>Preatoni</surname> <given-names>G.</given-names></name> <name><surname>Valle</surname> <given-names>G.</given-names></name> <name><surname>Marazzi</surname> <given-names>M.</given-names></name> <name><surname>Bracher</surname> <given-names>N. M.</given-names></name> <name><surname>Raspopovic</surname> <given-names>S.</given-names></name></person-group> (<year>2022</year>). <article-title>Multisensory stimulation decreases phantom limb distortions and is optimally integrated</article-title>. <source>iScience</source> <volume>25</volume>:<fpage>104129</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.isci.2022.104129</pub-id>, PMID: <pub-id pub-id-type="pmid">35391829</pub-id></citation>
</ref>
<ref id="ref46">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Risso</surname> <given-names>G.</given-names></name> <name><surname>Valle</surname> <given-names>G.</given-names></name> <name><surname>Iberite</surname> <given-names>F.</given-names></name> <name><surname>Strauss</surname> <given-names>I.</given-names></name> <name><surname>Stieglitz</surname> <given-names>T.</given-names></name> <name><surname>Controzzi</surname> <given-names>M.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Optimal integration of intraneural somatosensory feedback with visual information: a single-case study</article-title>. <source>Sci. Rep.</source> <volume>9</volume>:<fpage>7916</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-019-43815-1</pub-id>, PMID: <pub-id pub-id-type="pmid">31133637</pub-id></citation>
</ref>
<ref id="ref47">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ronga</surname> <given-names>I.</given-names></name> <name><surname>Galigani</surname> <given-names>M.</given-names></name> <name><surname>Bruno</surname> <given-names>V.</given-names></name> <name><surname>Castellani</surname> <given-names>N.</given-names></name> <name><surname>Rossi Sebastiano</surname> <given-names>A.</given-names></name> <name><surname>Valentini</surname> <given-names>E.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Seeming confines: electrophysiological evidence of peripersonal space remapping following tool-use in humans</article-title>. <source>Cortex</source> <volume>144</volume>, <fpage>133</fpage>&#x2013;<lpage>150</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cortex.2021.08.004</pub-id>, PMID: <pub-id pub-id-type="pmid">34666298</pub-id></citation>
</ref>
<ref id="ref48">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rossi Sebastiano</surname> <given-names>A.</given-names></name> <name><surname>Bruno</surname> <given-names>V.</given-names></name> <name><surname>Ronga</surname> <given-names>I.</given-names></name> <name><surname>Fossataro</surname> <given-names>C.</given-names></name> <name><surname>Galigani</surname> <given-names>M.</given-names></name> <name><surname>Neppi-Modona</surname> <given-names>M.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>Diametrical modulation of tactile and visual perceptual thresholds during the rubber hand illusion: a predictive coding account</article-title>. <source>Psychol. Res.</source> <volume>86</volume>, <fpage>1830</fpage>&#x2013;<lpage>1846</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00426-021-01608-0</pub-id>, PMID: <pub-id pub-id-type="pmid">34773491</pub-id></citation>
</ref>
<ref id="ref49">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Scott</surname> <given-names>R. B.</given-names></name> <name><surname>Samaha</surname> <given-names>J.</given-names></name> <name><surname>Chrisley</surname> <given-names>R.</given-names></name> <name><surname>Dienes</surname> <given-names>Z.</given-names></name></person-group> (<year>2018</year>). <article-title>Prevailing theories of consciousness are challenged by novel cross-modal associations acquired between subliminal stimuli</article-title>. <source>Cognition</source> <volume>175</volume>, <fpage>169</fpage>&#x2013;<lpage>185</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cognition.2018.02.008</pub-id>, PMID: <pub-id pub-id-type="pmid">29544152</pub-id></citation>
</ref>
<ref id="ref50">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sergent</surname> <given-names>C.</given-names></name> <name><surname>Corazzol</surname> <given-names>M.</given-names></name> <name><surname>Labouret</surname> <given-names>G.</given-names></name> <name><surname>Stockart</surname> <given-names>F.</given-names></name> <name><surname>Wexler</surname> <given-names>M.</given-names></name> <name><surname>King</surname> <given-names>J. R.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Bifurcation in brain dynamics reveals a signature of conscious processing independent of report</article-title>. <source>Nat. Commun.</source> <volume>12</volume>:<fpage>1149</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41467-021-21393-z</pub-id>, PMID: <pub-id pub-id-type="pmid">33608533</pub-id></citation>
</ref>
<ref id="ref51">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Seth</surname> <given-names>A. K.</given-names></name> <name><surname>Bayne</surname> <given-names>T.</given-names></name></person-group> (<year>2022</year>). <article-title>Theories of consciousness</article-title>. <source>Nat. Rev. Neurosci.</source> <volume>23</volume>, <fpage>439</fpage>&#x2013;<lpage>452</lpage>. doi: <pub-id pub-id-type="doi">10.1038/s41583-022-00587-4</pub-id></citation>
</ref>
<ref id="ref52">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Setti</surname> <given-names>F.</given-names></name> <name><surname>Handjaras</surname> <given-names>G.</given-names></name> <name><surname>Bottari</surname> <given-names>D.</given-names></name> <name><surname>Leo</surname> <given-names>A.</given-names></name> <name><surname>Diano</surname> <given-names>M.</given-names></name> <name><surname>Bruno</surname> <given-names>V.</given-names></name> <etal/></person-group>. (<year>2023</year>). <article-title>A modality-independent proto-organization of human multisensory areas</article-title>. <source>Nat. Hum. Behav.</source> <volume>7</volume>, <fpage>397</fpage>&#x2013;<lpage>410</lpage>. doi: <pub-id pub-id-type="doi">10.1038/s41562-022-01507-3</pub-id>, PMID: <pub-id pub-id-type="pmid">36646839</pub-id></citation>
</ref>
<ref id="ref53">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stein</surname> <given-names>B. E.</given-names></name> <name><surname>Stanford</surname> <given-names>T. R.</given-names></name> <name><surname>Rowland</surname> <given-names>B. A.</given-names></name></person-group> (<year>2014</year>). <article-title>Development of multisensory integration from the perspective of the individual neuron</article-title>. <source>Nat. Rev. Neurosci.</source> <volume>15</volume>, <fpage>520</fpage>&#x2013;<lpage>535</lpage>. doi: <pub-id pub-id-type="doi">10.1038/nrn3742</pub-id>, PMID: <pub-id pub-id-type="pmid">25158358</pub-id></citation>
</ref>
<ref id="ref54">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tamietto</surname> <given-names>M.</given-names></name> <name><surname>Cauda</surname> <given-names>F.</given-names></name> <name><surname>Corazzini</surname> <given-names>L. L.</given-names></name> <name><surname>Savazzi</surname> <given-names>S.</given-names></name> <name><surname>Marzi</surname> <given-names>C. A.</given-names></name> <name><surname>Goebel</surname> <given-names>R.</given-names></name> <etal/></person-group>. (<year>2010</year>). <article-title>Collicular vision guides non-conscious behavior</article-title>. <source>J. Cogn. Neurosci.</source> <volume>22</volume>, <fpage>888</fpage>&#x2013;<lpage>902</lpage>. doi: <pub-id pub-id-type="doi">10.1162/jocn.2009.21225</pub-id>, PMID: <pub-id pub-id-type="pmid">19320547</pub-id></citation>
</ref>
<ref id="ref55">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tononi</surname> <given-names>G.</given-names></name> <name><surname>Boly</surname> <given-names>M.</given-names></name> <name><surname>Massimini</surname> <given-names>M.</given-names></name> <name><surname>Koch</surname> <given-names>C.</given-names></name></person-group> (<year>2016</year>). <article-title>Integrated information theory: from consciousness to its physical substrate</article-title>. <source>Nat. Rev. Neurosci.</source> <volume>17</volume>, <fpage>450</fpage>&#x2013;<lpage>461</lpage>. doi: <pub-id pub-id-type="doi">10.1038/nrn.2016.44</pub-id>, PMID: <pub-id pub-id-type="pmid">27225071</pub-id></citation>
</ref>
<ref id="ref56">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tsuchiya</surname> <given-names>N.</given-names></name> <name><surname>Wilke</surname> <given-names>M.</given-names></name> <name><surname>Fr&#x00E4;ssle</surname> <given-names>S.</given-names></name> <name><surname>Lamme</surname> <given-names>V. A. F.</given-names></name></person-group> (<year>2015</year>). <article-title>No-report paradigms: extracting the true neural correlates of consciousness</article-title>. <source>Trends Cogn. Sci.</source> <volume>19</volume>, <fpage>757</fpage>&#x2013;<lpage>770</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2015.10.002</pub-id>, PMID: <pub-id pub-id-type="pmid">26585549</pub-id></citation>
</ref>
<ref id="ref57">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Valle</surname> <given-names>G.</given-names></name> <name><surname>Preatoni</surname> <given-names>G.</given-names></name> <name><surname>Raspopovic</surname> <given-names>S.</given-names></name></person-group> (<year>2021</year>). &#x201C;<article-title>Connecting residual nervous system and prosthetic legs for sensorimotor and cognitive rehabilitation</article-title>&#x201D; in <source>Somatosensory Feedback for Neuroprosthetics</source> (<publisher-loc>London</publisher-loc>: <publisher-name>Elsevier</publisher-name>), <fpage>293</fpage>&#x2013;<lpage>320</lpage>.</citation>
</ref>
<ref id="ref58">
<citation citation-type="journal"><person-group person-group-type="author">
<name><surname>Wiens</surname> <given-names>S.</given-names></name>
</person-group> (<year>2006</year>). <article-title>Current concerns in visual masking</article-title>. <source>Emotion</source> <volume>6</volume>, <fpage>675</fpage>&#x2013;<lpage>680</lpage>. doi: <pub-id pub-id-type="doi">10.1037/1528-3542.6.4.675</pub-id></citation>
</ref>
<ref id="ref59">
<citation citation-type="journal"><person-group person-group-type="author">
<name><surname>Wiens</surname> <given-names>S.</given-names></name>
</person-group> (<year>2007</year>). <article-title>Concepts of visual consciousness and their measurement</article-title>. <source>Adv. Cogn. Psychol.</source> <volume>3</volume>, <fpage>349</fpage>&#x2013;<lpage>359</lpage>. doi: <pub-id pub-id-type="doi">10.2478/v10053-008-0035-y</pub-id>, PMID: <pub-id pub-id-type="pmid">20517519</pub-id></citation>
</ref>
<ref id="ref60">
<citation citation-type="journal"><person-group person-group-type="author"><name>
<surname>Zher-Wen</surname>
</name> <name><surname>Yu</surname> <given-names>R.</given-names></name></person-group> (<year>2023</year>). <article-title>Unconscious integration: current evidence for integrative processing under subliminal conditions</article-title>. <source>Br. J. Psychol.</source> <volume>114</volume>, <fpage>430</fpage>&#x2013;<lpage>456</lpage>. doi: <pub-id pub-id-type="doi">10.1111/bjop.12631</pub-id>, PMID: <pub-id pub-id-type="pmid">36689339</pub-id></citation>
</ref>
</ref-list>
</back>
</article>