<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article article-type="brief-report" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" dtd-version="1.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Psychol.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Psychology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Psychol.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1664-1078</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpsyg.2025.1644393</article-id><article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading"><subject>Brief Research Report</subject></subj-group>
</article-categories>
<title-group>
<article-title>Touching soft materials slows affective visual processing</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Pasqualotto</surname>
<given-names>Achille</given-names>
</name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3095700"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Leong</surname>
<given-names>Utek</given-names>
</name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Kitada</surname>
<given-names>Ryo</given-names>
</name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/31880"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &#x0026; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>School of Social Sciences, Nanyang Technological University</institution>, <city>Singapore</city>, <country country="sg">Singapore</country></aff>
<aff id="aff2"><label>2</label><institution>Faculty of Human Sciences, University of Tsukuba</institution>, <city>Tsukuba</city>, <country country="jp">Japan</country></aff>
<aff id="aff3"><label>3</label><institution>Japan Society for the Promotion of Science (JSPS)</institution>, <city>Tokyo</city>, <country country="jp">Japan</country></aff>
<aff id="aff4"><label>4</label><institution>Department of Psychology, National University of Singapore</institution>, <city>Singapore</city>, <country country="sg">Singapore</country></aff>
<aff id="aff5"><label>5</label><institution>Graduate School of Intercultural Studies, Kobe University</institution>, <city>Kobe</city>, <country country="jp">Japan</country></aff>
<author-notes><corresp id="c001"><label>&#x002A;</label>Correspondence: Achille Pasqualotto, <email xlink:href="mailto:pasqualotto.achil.fw@u.tsukuba.ac.jp">pasqualotto.achil.fw@u.tsukuba.ac.jp</email></corresp></author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2025-11-26">
<day>26</day>
<month>11</month>
<year>2025</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2025</year>
</pub-date>
<volume>16</volume>
<elocation-id>1644393</elocation-id>
<history>
<date date-type="received">
<day>10</day>
<month>06</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>30</day>
<month>10</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2025 Pasqualotto, Leong and Kitada.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Pasqualotto, Leong and Kitada</copyright-holder>
<license><ali:license_ref start_date="2025-11-26">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>Presently, there is extensive evidence of multisensory integration in tactile and visual processing. While it has been shown that multisensory interaction between touch and vision influences many cognitive processes, such as object recognition, the role of multisensory interaction in the affective domain is still poorly understood. The aim of this study was to examine the influence of tactile perception on the affective processing of visual stimuli. Two experiments were conducted with urethane rubbers of differing compliance and with visually presented words. In the first experiment, participants rated the affective valence of the visually presented words while touching hard or soft urethane rubbers. Ratings and reaction times were recorded. Results showed that touching a soft stimulus slowed the valence rating of visual words, but it did not affect the valence ratings per se. A second experiment clarified whether this effect was unique to valence (affective) ratings or whether it extended to semantic (cognitive) ratings as well. The second experiment was identical to the first one, but here participants rated the level of abstractness of the same visually presented words. Results indicated that abstractness ratings were not affected by the tactile stimuli. Overall, these results confirm that, possibly via an attentional mechanism, tactile input influences the speed of affective visual processing.</p>
</abstract>
<kwd-group>
<kwd>multisensory integration</kwd>
<kwd>vision</kwd>
<kwd>touch</kwd>
<kwd>valence</kwd>
<kwd>affective processing</kwd>
</kwd-group>
<funding-group><award-group id="gs1"><funding-source id="sp1"><institution-wrap><institution>Japan Society for the Promotion of Science</institution><institution-id institution-id-type="doi" vocab="open-funder-registry" vocab-identifier="10.13039/open_funder_registry">10.13039/501100001691</institution-id></institution-wrap></funding-source></award-group><award-group id="gs2"><funding-source id="sp2"><institution-wrap><institution>Nanyang Technological University</institution><institution-id institution-id-type="doi" vocab="open-funder-registry" vocab-identifier="10.13039/open_funder_registry">10.13039/501100001475</institution-id></institution-wrap></funding-source></award-group><award-group id="gs3"><funding-source id="sp3"><institution-wrap><institution>JSPS KAKENHI</institution></institution-wrap></funding-source></award-group><funding-statement>The author(s) declare that financial support was received for the research and/or publication of this article. This project was supported by a Japan Society for the Promotion of Science (JSPS) KAKENHI grant (24K06616) awarded to AP; a NAP start-up grant from Nanyang Technological University and a MEXT/JSPS KAKENHI grant (25H00581) to RK; and the Undergraduate Research Experience on CAmpus (U.R.E.CA.) program at Nanyang Technological University.</funding-statement></funding-group><counts>
<fig-count count="3"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="56"/>
<page-count count="9"/>
<word-count count="6403"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Cognition</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="sec1">
<label>1</label>
<title>Introduction</title>
<p>In our daily lives, we tend to think of our senses as distinct modalities that individually afford the perception of the objects around us. However, we often disregard how our senses work together to enhance our perceptions and how important this integration is in our daily lives (<xref ref-type="bibr" rid="ref47">Stein, 2012</xref>).</p>
<p>Of particular interest is the interaction between vision and touch, which encompasses object recognition (<xref ref-type="bibr" rid="ref27">Klatzky and Lederman, 2011</xref>; <xref ref-type="bibr" rid="ref30">Lacey and Sathian, 2011</xref>; <xref ref-type="bibr" rid="ref34">Martinovic et al., 2012</xref>; <xref ref-type="bibr" rid="ref36">Newell et al., 2001</xref>), object localization (<xref ref-type="bibr" rid="ref4">Cattaneo and Vecchi, 2008</xref>; <xref ref-type="bibr" rid="ref39">Pasqualotto et al., 2013</xref>; <xref ref-type="bibr" rid="ref53">Woods et al., 2004</xref>; <xref ref-type="bibr" rid="ref56">Zuidhoek et al., 2004</xref>), as well as body representation (<xref ref-type="bibr" rid="ref6">Della Longa et al., 2022</xref>; <xref ref-type="bibr" rid="ref19">Kaneno et al., 2024</xref>; <xref ref-type="bibr" rid="ref41">Pasqualotto and Proulx, 2015</xref>). Indeed, these two modalities complement one another by providing us with distinct sets of information (e.g., color by vision and temperature by touch). Therefore, multisensory integration between vision and touch has been broadly investigated (e.g., <xref ref-type="bibr" rid="ref38">Pasqualotto et al., 2016</xref>), yet the affective component of visuo-tactile integration has been far less studied and understood (<xref ref-type="bibr" rid="ref12">Filippetti et al., 2019</xref>; <xref ref-type="bibr" rid="ref35">Morrison, 2016</xref>). We focused on touch because it is well-suited for affective processing (e.g., <xref ref-type="bibr" rid="ref5">Cavdan et al., 2023</xref>; <xref ref-type="bibr" rid="ref7">Drewing et al., 2018</xref>; <xref ref-type="bibr" rid="ref24">Kirsch et al., 2020</xref>; <xref ref-type="bibr" rid="ref15">Guest et al., 2011</xref>; <xref ref-type="bibr" rid="ref40">Pasqualotto et al., 2020</xref>). 
For example, our studies found evidence that soft tactile stimuli engender pleasantness (<xref ref-type="bibr" rid="ref40">Pasqualotto et al., 2020</xref>), and softness perception activates the insula, a region of the brain involved in affective processing (<xref ref-type="bibr" rid="ref25">Kitada et al., 2019</xref>).</p>
<p>Previous studies suggested that perceiving pleasurable attributes in one sensory modality (e.g., touch) affects the overall multisensory experience of a product by biasing affective perceptions of other sensory modalities (e.g., vision and audition) (see <xref ref-type="bibr" rid="ref44">Spence, 2022</xref>; <xref ref-type="bibr" rid="ref45">Spence and Gallace, 2011</xref> for reviews). <xref ref-type="bibr" rid="ref51">Suzuki and Gyoba (2008)</xref> found that the repeated visual exposure to novel stimuli (mere-exposure effect, <xref ref-type="bibr" rid="ref55">Zajonc, 1968</xref>) increased the participants&#x2019; subsequent preference for those stimuli when judged by touch. <xref ref-type="bibr" rid="ref54">Wu et al. (2011)</xref> exposed participants to multisensory (visual and tactile) stimuli and found a similar mere-exposure effect for affective judgements within the visual domain. Are the results of these studies examples of multisensory affective priming? Although unisensory (vision), the study by <xref ref-type="bibr" rid="ref42">Pecchinenda et al. (2014)</xref> offers an interesting methodology. In their Experiment 1, symmetric/asymmetric &#x2018;clouds&#x2019; of dots preceded words that participants categorized as positive or negative. Authors found that symmetric clouds improved the categorization of positive words. This study belongs to the vast literature on affective visual priming, reporting congruency effects between a priming stimulus and the response of the observers (both in terms of accuracy and reaction times) (<xref ref-type="bibr" rid="ref9">Fazio, 2001</xref>; <xref ref-type="bibr" rid="ref18">Hermans et al., 1994</xref>, <xref ref-type="bibr" rid="ref17">2001</xref>). Here, symmetric clouds were perceived as more pleasant (<xref ref-type="bibr" rid="ref13">Friedenberg and Bertamini, 2015</xref>), thus improving the processing of positive words (congruency). Would the same results stand in a multisensory setting?</p>
<p>We decided to investigate the effect of tactile stimuli on affective visual processing adapting the affective priming used by <xref ref-type="bibr" rid="ref42">Pecchinenda et al. (2014)</xref>, but utilised urethane rubbers of different compliance (soft/hard) as tactile stimuli (rather than clouds of dots), and words of varied valence (positive/neutral/negative) as visual stimuli, which participants rated in terms of valence. These urethane rubbers have been used in other experiments of ours (e.g., <xref ref-type="bibr" rid="ref40">Pasqualotto et al., 2020</xref>), and we know their physical characteristics and pleasantness. To our knowledge, no study has investigated affective priming between tactile softness and words of valence.</p>
<p>We expect that the characteristics of the tactile stimuli (soft/hard) will interact with the valence of the visual stimuli (positive/neutral/negative), thus influencing participants&#x2019; affective judgement of the visually presented word.</p>
</sec>
<sec id="sec2">
<label>2</label>
<title>Experiment 1</title>
<p>The first experiment tested our principal hypothesis about the effect of touch on affective visual processing. Here, participants evaluated the valence of words presented on a computer screen, <italic>while</italic> they were touching hard/soft urethane rubbers. Beforehand, a pilot study helped us to choose and classify the visual words.</p>
<sec id="sec3">
<label>2.1</label>
<title>Materials and methods</title>
<sec id="sec4">
<label>2.1.1</label>
<title>Participants</title>
<p>Twenty-four (12 male and 12 female) right-handed individuals aged between 19 and 32 (mean 23.42&#x202F;years) participated in Experiment 1 and were recruited via posters placed around Nanyang Technological University&#x2019;s campus. The minimum sample size was determined by the experiments on affective priming presented by <xref ref-type="bibr" rid="ref42">Pecchinenda et al. (2014)</xref>. Participants&#x2019; handedness was obtained using the Fazio Laterality Inventory (FLI; <xref ref-type="bibr" rid="ref10">Fazio et al., 2013</xref>). Participants did not present any tactile impairments or injuries on their hands and had normal or corrected-to-normal vision. All of them provided written informed consent before starting the experiment (or the pilot, see below). All studies were approved by the Institutional Review Board (IRB-2018-07-013) at Nanyang Technological University, thus are in accordance with the ethical standards of the Helsinki Declaration of 1975, as revised in 2000. Participants received 10 Singaporean Dollars (SGD) for their participation (5 SGD for the pilot).</p>
</sec>
<sec id="sec5">
<label>2.1.2</label>
<title>Apparatus</title>
<p>In order to select the visual stimuli for Experiment 1, a pilot study (<italic>N</italic>&#x202F;=&#x202F;10; five male and five female, average age 23.6&#x202F;years, with normal or corrected-to-normal vision) was run using 130 words from the Affective Norms for English Words (ANEW; <xref ref-type="bibr" rid="ref2">Bradley and Lang, 1999</xref>) and the software Presentation&#x2122; (Berkeley, USA). Participants were randomly presented with these words twice; during one presentation, participants rated the <italic>Valence</italic> of the words (from &#x201C;not positive at all&#x201D; to &#x201C;very positive&#x201D;) on a scale from 1-to-9 (with 1 indicating &#x201C;not positive at all&#x201D;), while in the other presentation participants rated how much a word was associated with tactile sensations (from &#x201C;not associated at all&#x201D; to &#x201C;very associated&#x201D;) on a scale from 1-to-9 (with 1 indicating &#x201C;not associated at all&#x201D;). For example, the word &#x201C;book&#x201D; would have a Tactile Association score higher than the word &#x201C;cloud.&#x201D; Knowing the Tactile Association ratings was necessary to ensure that all the selected words had small and comparable pre-existing associations with tactile sensations, thus preventing potential confounds in Experiment 1. The tasks&#x2019; order (Valence rating and Tactile Association rating) was counterbalanced across participants.</p>
<p>The average Valence and Tactile Association ratings were calculated for each word and then words were ordered by their Valence. Then we selected the top eight words (i.e., those with the highest Valence, or Positive Words, e.g., &#x201C;Free&#x201D;), the bottom eight words (those with the lowest Valence, or Negative Words, e.g., &#x201C;Death&#x201D;), and the eight words that were around the average Valence rating (5.72), or Neutral Words (e.g., &#x201C;Farm&#x201D;). At the same time, to avoid biases, the selected words had to have Tactile Association ratings below the average Tactile Association (4.53). The selected words (see Supplementary Table S1 for details) were visually presented in Experiment 1.</p>
<p>Four polyurethane rubbers (Kat&#x014D; Tech, Kyoto, Japan) with differing compliance were used in Experiment 1; rubber A (compliance: 0.13&#x202F;mm/N), rubber B (0.45&#x202F;mm/N), rubber H (7.56&#x202F;mm/N), and rubber I (10.53&#x202F;mm/N). Compliance indicates rubbers&#x2019; indentation when the same pressure is applied [refer to <xref ref-type="bibr" rid="ref40">Pasqualotto et al. (2020)</xref> for a detailed explanation of the compliance measurement]. Therefore, as indicated by their low compliance values, rubbers A and B were the &#x201C;Hard&#x201D; rubbers, while H and I were the &#x201C;Soft&#x201D; rubbers. Additionally, based on our previous study (<xref ref-type="bibr" rid="ref40">Pasqualotto et al., 2020</xref>), we know that soft rubbers (H and I) were more pleasant than hard rubbers (A and B). To ensure that rubbers were presented in the same manner, we used the device called Model SHR III-5 SK (Aikoh Engineering, Osaka, Japan) to press the rubbers onto three fingers of the participants at the same speed (5&#x202F;cm/s) and with the same maximum applied force (5&#x202F;N). This device was used in our previous studies (e.g., <xref ref-type="bibr" rid="ref40">Pasqualotto et al., 2020</xref>).</p>
</sec>
<sec id="sec6">
<label>2.1.3</label>
<title>Procedure</title>
<p>Upon giving informed written consent, completing the handedness questionnaire (FLI), and a brief demographic questionnaire, participants sat in front of the apparatus (see <xref ref-type="fig" rid="fig1">Figure 1</xref> for details). Words were presented on a computer screen (36&#x202F;&#x00D7;&#x202F;28&#x202F;cm, positioned about 35&#x202F;cm away) and, to ensure that participants paid attention to the <italic>visually</italic> presented words they rested their heads on an ophthalmic chin rest. Words subtended a visual angle included between 8.99&#x00B0; and 3.27&#x00B0; width (depending on words&#x2019; length) and 2.46&#x00B0; height. Instructions were: &#x201C;Please, rate the words that will appear on the screen in terms of how positive they are. Use a scale from 1-to-9, with 1&#x202F;=&#x202F;&#x201C;not positive at all&#x201D; and 9&#x202F;=&#x202F;&#x201C;very positive.&#x201D; Participants used their non-dominant (left) hands to type their answers on a keypad. They were asked to pay attention only to the words appearing on the screen placed in front of them and to ignore tactile stimuli. Participants were required to answer as quickly and as accurately as possible.</p>
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption>
<p>Experimental setup: <bold>(a)</bold> the monitor showing the 24 visual stimuli (in this case, the word &#x201C;Education&#x201D;), while <italic>at the same time</italic> <bold>(b)</bold> the arm of the machine lowers to present the four tactile stimuli to the gloved hand fixed with Velcro to the table; the fingertips of three fingers (index, middle, and ring) are exposed to the tactile stimulus (the image of the hand was designed by Freepik [<ext-link xlink:href="https://www.freepik.com" ext-link-type="uri">https://www.freepik.com</ext-link>] and modified for this article).</p>
</caption>
<graphic xlink:href="fpsyg-16-1644393-g001.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Diagram with two panels. Panel a shows an eye viewing a screen labeled "Education." Panel b illustrates a machine arm with urethane rubber pressing down onto a hand beneath it.</alt-text>
</graphic>
</fig>
<p>Words remained on the screen until an answer was produced or up to 2,500&#x202F;ms. The tactile stimuli remained in touch with participants&#x2019; hands until the applied force of 5&#x202F;N was reached. The initial five trials were for practice only, and employed rubbers and visual words that were not used in the actual experiment. Twenty-four experimental trials were then conducted for each participant. The visually presented words were pseudo-randomized such that words belonging to the same category (positive/neutral/negative) were never consecutively presented. Likewise, tactile stimuli of the same category (soft/hard) were never consecutively presented. Valence ratings and reaction times (RT) were recorded for each trial. The experimental session lasted about 30&#x202F;min, while the pilot experiment lasted about 15&#x202F;min. None of the participants who joined the pilot were allowed to take part in Experiment 1.</p>
</sec>
</sec>
<sec id="sec7">
<label>2.2</label>
<title>Results</title>
<p>For each participant we calculated the average Valence Ratings and RT (dependent variables) for the three types of Visual Words (Positive/Neutral/Negative, first independent variable) and two types of Tactile Rubbers (Soft/Hard, second independent variable), thus resulting in a 3 &#x00D7; 2 design. These data underwent statistical processing.</p>
<sec id="sec8">
<label>2.2.1</label>
<title>Valence ratings</title>
<p>The Shapiro&#x2013;Wilk test of normality was significant for two-out-of-six datasets [<italic>W</italic>(24)&#x202F;=&#x202F;0.885, <italic>p</italic>&#x202F;=&#x202F;0.011 and <italic>W</italic>(24)&#x202F;=&#x202F;0.896, <italic>p</italic>&#x202F;=&#x202F;0.017, while for all the others <italic>W</italic>(24)&#x202F;&#x003E;&#x202F;0.957, <italic>p</italic>&#x202F;&#x003E;&#x202F;0.387]. Since most of the tests were not significant (4-out-of-6), we conducted parametric statistical tests on our data (<xref ref-type="bibr" rid="ref11">Field, 2009</xref>).</p>
<p>We ran a two-way within-subjects ANOVA on the average Valence Ratings with Visual Words and Tactile Rubbers as factors. The Mauchly&#x2019;s test showed that the assumption of sphericity was violated for the main effect of Visual Words [<italic>&#x03C7;<sup>2</sup></italic>(2)&#x202F;=&#x202F;7.22, <italic>p</italic>&#x202F;=&#x202F;0.030]. Therefore, the associated degree of freedom for the main effect of Visual Words was corrected using Greenhouse&#x2013;Geisser estimates of sphericity.</p>
<p>The results of the two-way ANOVA showed a statistically significant main effect of Visual Words on participants&#x2019; Valence Ratings [<italic>F</italic>(1.56, 35.94)&#x202F;=&#x202F;171.98, <italic>p</italic>&#x202F;&#x003C;&#x202F;0.001, <italic>&#x03B7;<sub>p</sub><sup>2</sup></italic>&#x202F;=&#x202F;0.88]. Post-hoc pairwise comparisons with Bonferroni adjustment indicated that the average Valence Ratings were significantly higher for Positive Visual Words (<italic>M</italic>&#x202F;=&#x202F;7.59, SD&#x202F;=&#x202F;0.90) than for Neutral Visual Words (<italic>M</italic>&#x202F;=&#x202F;6.01, SD&#x202F;=&#x202F;1.15) [<italic>p</italic>&#x202F;&#x003C;&#x202F;0.001] and Negative Visual Words (<italic>M</italic>&#x202F;=&#x202F;3.04, SD&#x202F;=&#x202F;1.20) [<italic>p</italic>&#x202F;&#x003C;&#x202F;0.001]. Average Valence Ratings were also significantly higher for Neutral Visual Words than for Negative Visual Words [<italic>p</italic>&#x202F;&#x003C;&#x202F;0.001] (see <xref ref-type="fig" rid="fig2">Figure 2a</xref>, left). This confirms the words&#x2019; categorization that emerged from the pilot experiment.</p>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption>
<p>Results of Experiment 1 on Valence Ratings and RT; the size of the bars indicates the quartile, the horizontal lines inside each bar indicate the median, and the white diamonds indicate the average. Whiskers represent the largest and smallest data values that are within 1.5&#x202F;&#x00D7;&#x202F;the interquartile range (IQR) above the third quartile (Q3) and below the first quartile (Q1), respectively. The dots above/below the whiskers are the outliers. The notches of the boxes indicate the 95% confidence level around the median. <bold>(a)</bold> Valence Ratings for the Visual Words (left) and Tactile Rubbers (right); <bold>(b)</bold> RT for the same variables. &#x002A;&#x002A;&#x002A;<italic>p</italic>&#x202F;&#x003C;&#x202F;0.001, &#x002A;&#x002A;<italic>p</italic>&#x202F;&#x003C;&#x202F;0.010, and &#x002A;<italic>p</italic>&#x202F;&#x003C;&#x202F;0.050.</p>
</caption>
<graphic xlink:href="fpsyg-16-1644393-g002.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Two panels show box plots comparing valence ratings and reaction times. Panel (a) illustrates higher valence ratings for positive compared to neutral and negative, with significant differences indicated by asterisks. Soft and hard valence ratings are similar. Panel (b) displays reaction times, with positive having lower times than neutral and negative. Soft is also lower than hard, with significance denoted by asterisks.</alt-text>
</graphic>
</fig>
<p>The main effect of Tactile Rubbers and the interaction between Visual Words and Tactile Rubbers were not statistically significant [<italic>F</italic>(1, 23)&#x202F;=&#x202F;0.87, <italic>p</italic>&#x202F;=&#x202F;0.36, <italic>&#x03B7;<sub>p</sub><sup>2</sup></italic>&#x202F;=&#x202F;0.04 and <italic>F</italic>(2, 46)&#x202F;=&#x202F;1.78, <italic>p</italic>&#x202F;=&#x202F;0.18, <italic>&#x03B7;<sub>p</sub><sup>2</sup></italic>&#x202F;=&#x202F;0.07, respectively] (see <xref ref-type="fig" rid="fig2">Figure 2a</xref>, right). This suggests that the valence ratings of visually presented words were not influenced by the tactile stimuli.</p>
</sec>
<sec id="sec9">
<label>2.2.2</label>
<title>Reaction times</title>
<p>The Shapiro&#x2013;Wilk test of normality was significant for each dataset [all <italic>W</italic>(24)&#x202F;&#x003C;&#x202F;0.907, all <italic>p</italic>&#x202F;&#x003C;&#x202F;0.031]. Therefore, we conducted non-parametric statistical tests on our data.</p>
<p>Since our design requires a two-way ANOVA, to clarify the main effects of Visual Words and Tactile Rubbers we averaged these variables. The first procedure produced three datasets based on the types of Visual Word, which were analyzed using a Related-Samples Friedman test. Results showed a significant effect of the Visual Words [&#x03C7;<sup>2</sup>(5)&#x202F;=&#x202F;12.33, <italic>p</italic>&#x202F;=&#x202F;0.002], and the Bonferroni-corrected pairwise comparisons confirmed that Positive Visual Words were rated more rapidly (<italic>M</italic>&#x202F;=&#x202F;2.24&#x202F;s, SD&#x202F;=&#x202F;1.03) than Neutral (<italic>M</italic>&#x202F;=&#x202F;2.42&#x202F;s, SD&#x202F;=&#x202F;1.03) and Negative (<italic>M</italic>&#x202F;=&#x202F;2.47&#x202F;s, SD&#x202F;=&#x202F;1.06) Visual Words [<italic>p</italic>&#x202F;=&#x202F;0.012 and <italic>p</italic>&#x202F;=&#x202F;0.004, respectively] (see <xref ref-type="fig" rid="fig2">Figure 2b</xref>, left). This indicates that positive words were rated more rapidly. Then, we averaged RT across the three types of Visual Words to obtain two datasets based on the types of Tactile Rubbers, which were analyzed using a Related-Samples Wilcoxon Test. Results showed a significant difference [<italic>Z</italic>&#x202F;=&#x202F;34.50, <italic>p</italic>&#x202F;&#x003C;&#x202F;0.002], thus confirming that touching Soft rubbers slowed RT (<italic>M</italic>&#x202F;=&#x202F;2.45&#x202F;s, SD&#x202F;=&#x202F;1.02&#x202F;s), compared to Hard rubbers (<italic>M</italic>&#x202F;=&#x202F;2.30&#x202F;s, SD&#x202F;=&#x202F;1.02&#x202F;s) (see <xref ref-type="fig" rid="fig2">Figure 2b</xref>, right). Finally, to investigate the interaction between Visual Words and Tactile Rubbers, for each visual condition (Positive/Neutral/Negative) we calculated the difference Soft minus Hard (see <xref ref-type="supplementary-material" rid="SM1">Supplementary Table S3</xref> for details) and ran a Related-Samples Friedman test. 
The result was not significant [&#x03C7;<sup>2</sup>(2)&#x202F;=&#x202F;0.333, <italic>p</italic>&#x202F;=&#x202F;0.846], indicating the lack of interaction.</p>
</sec>
</sec>
<sec id="sec10">
<label>2.3</label>
<title>Discussion</title>
<p>The main results of Experiment 1 were that (1) words with a positive valence were faster to rate. Additionally, (2) touching hard rubbers speeded the rating of <italic>all</italic> the visually presented words. Although the advantage for processing words with a positive valence is still strongly debated (<xref ref-type="bibr" rid="ref21">Kauschke et al., 2019</xref>), most of the evidence favours the notion of faster reaction times for positive words during affective processing (<xref ref-type="bibr" rid="ref20">Kappes and Bermeitinger, 2016</xref>; <xref ref-type="bibr" rid="ref23">Kever et al., 2017</xref>; <xref ref-type="bibr" rid="ref32">Liu et al., 2016</xref>; <xref ref-type="bibr" rid="ref48">Stenberg et al., 1998</xref>). Therefore, our first result (faster processing of positive words) confirms this major trend in literature.</p>
<p>The other main finding was that hard tactile stimuli speeded the rating of visual words or, that soft tactile stimuli <italic>slowed</italic> the rating of visual words. We favor the latter interpretation of the results, because our past research (<xref ref-type="bibr" rid="ref40">Pasqualotto et al., 2020</xref>) showed that the soft stimuli used in this experiment are also more pleasant. Although somewhat debated (<xref ref-type="bibr" rid="ref33">Luck et al., 2021</xref>), pleasant stimuli are likely to attract more attention (<xref ref-type="bibr" rid="ref16">Gupta, 2019</xref>) and to slow the reaction times for visually presented words. If this is the case, would pleasant tactile stimuli affect any kind of visual processing? Or just the visual processing involving affective valence? Evidence for the latter would suggest task-specific interactions between touch and vision (<xref ref-type="bibr" rid="ref30">Lacey and Sathian, 2011</xref>; <xref ref-type="bibr" rid="ref37">Pascual-Leone and Hamilton, 2001</xref>; <xref ref-type="bibr" rid="ref39">Pasqualotto et al., 2013</xref>). To clarify this, we ran Experiment 2.</p>
</sec>
</sec>
<sec id="sec11">
<label>3</label>
<title>Experiment 2</title>
<p>Experiment 2 investigated the multisensory interaction of touch and vision, but using a semantic task that, unlike Experiment 1, did not involve the affective component. Specifically, here participants rated the level of abstractness of the same visually presented words used in Experiment 1, while they were touching the same urethane rubbers. A pilot study helped us to classify the visual words into three levels of abstractness (High/Medium/Low).</p>
<sec id="sec12">
<label>3.1</label>
<title>Materials and methods</title>
<sec id="sec13">
<label>3.1.1</label>
<title>Participants</title>
<p>Twenty-four (12 male and 12 female) na&#x00EF;ve right-handed volunteers aged between 18 and 57 (mean 26.04&#x202F;years) participated in Experiment 2. All the other details were the same as in Experiment 1.</p>
</sec>
<sec id="sec14">
<label>3.1.2</label>
<title>Apparatus</title>
<p>In order to classify the visual stimuli for Experiment 2, a pilot study (<italic>N</italic>&#x202F;=&#x202F;10; five male and five female, average age 25&#x202F;years) was conducted. Participants were randomly presented with the 24 visual words used in Experiment 1, and they rated the level of <italic>abstractness</italic> of these words (from &#x201C;not abstract at all&#x201D; to &#x201C;very abstract&#x201D;) on a scale from 1-to-9 (with 1 indicating &#x201C;not abstract at all&#x201D;).</p>
<p>Average Abstractness ratings were calculated for each word and then words were ordered by their Abstractness. The top eight words (i.e., those with the highest abstractness ratings) were classified as High, the bottom eight words (those with the lowest abstractness ratings) were classified as Low, and the eight words in the middle were classified as Medium (see <xref ref-type="supplementary-material" rid="SM1">Supplementary Table S2</xref> for details). All the other details were identical to Experiment 1.</p>
</sec>
<sec id="sec15">
<label>3.1.3</label>
<title>Procedure</title>
<p>The procedure of Experiment 2 was the same as Experiment 1, with the exception that participants received the instruction: &#x201C;Please, rate the words that will appear on the screen in terms of how abstract they are. Use a scale from 1-to-9, with 1&#x202F;=&#x202F;&#x2018;not abstract at all&#x2019; and 9&#x202F;=&#x202F;&#x2018;very abstract&#x2019;.&#x201D;</p>
</sec>
</sec>
<sec id="sec16">
<label>3.2</label>
<title>Results</title>
<p>Raw data were pre-processed as in Experiment 1.</p>
<sec id="sec17">
<label>3.2.1</label>
<title>Abstractness ratings</title>
<p>The Shapiro&#x2013;Wilk test of normality was significant for one-out-of-six datasets [<italic>W</italic>(24)&#x202F;=&#x202F;0.905, <italic>p</italic>&#x202F;=&#x202F;0.028, while for all the others <italic>W</italic>(24)&#x202F;&#x003E;&#x202F;0.941, <italic>p</italic>&#x202F;&#x003E;&#x202F;0.174]. Therefore, we conducted parametric statistical tests on our data.</p>
<p>As in Experiment 1, a two-way within-subjects ANOVA on the Visual Words average ratings with Visual Words (High Abstractness/Medium Abstractness/Low Abstractness) and Tactile Rubbers (Hard vs. Soft) as independent variables was run.</p>
<p>The Mauchly&#x2019;s test showed that the assumption of sphericity was violated for the main effect of Visual Words [<italic>&#x03C7;<sup>2</sup></italic>(2)&#x202F;=&#x202F;11.44, <italic>p</italic>&#x202F;=&#x202F;0.030]. Therefore, for this variable the degree of freedom was corrected using Greenhouse&#x2013;Geisser estimates of sphericity.</p>
<p>The results of the two-way ANOVA highlighted a statistically significant main effect of Visual Words on the participants&#x2019; abstractness ratings [<italic>F</italic>(1.42, 32.73)&#x202F;=&#x202F;44.97, <italic>p</italic>&#x202F;&#x003C;&#x202F;0.001, <italic>&#x03B7;<sub>p</sub><sup>2</sup></italic>&#x202F;=&#x202F;0.660]. Post-hoc pairwise comparisons with Bonferroni adjustments indicated that the average abstractness ratings were significantly higher for High Abstractness words (M&#x202F;=&#x202F;6.72, SD&#x202F;=&#x202F;1.43) than for Medium Abstractness words (<italic>M</italic>&#x202F;=&#x202F;5.78, SD&#x202F;=&#x202F;1.71) [<italic>p</italic>&#x202F;&#x003C;&#x202F;0.017] and Low Abstractness words (<italic>M</italic>&#x202F;=&#x202F;3.70, SD&#x202F;=&#x202F;1.65) [<italic>p</italic>&#x202F;&#x003C;&#x202F;0.001]. Average abstractness ratings were also significantly higher for Medium Abstractness words than for Low Abstractness Words [<italic>p</italic>&#x202F;&#x003C;&#x202F;0.001]. Substantially, these results corroborate the categorization of the words that emerged from the pilot (see <xref ref-type="fig" rid="fig3">Figure 3a</xref>, left).</p>
<fig position="float" id="fig3">
<label>Figure 3</label>
<caption>
<p>Results of Experiment 2 on Abstractness Ratings and RT; the size of the bars indicates the quartile, the horizontal lines inside each bar indicate the median, and the white diamonds indicate the average. Whiskers represent the largest and smallest data values that are within 1.5&#x202F;&#x00D7;&#x202F;the interquartile range (IQR) above the third quartile (Q3) and below the first quartile (Q1), respectively. The dots above/below the whiskers are the outliers. The notches of the boxes indicate the 95% confidence level around the median. <bold>(a)</bold> Abstractness Ratings for the Visual Words (left) and Tactile Rubbers (right); <bold>(b)</bold> RT for the same variables. &#x002A;&#x002A;&#x002A;<italic>p</italic>&#x202F;&#x003C;&#x202F;0.001, &#x002A;&#x002A;<italic>p</italic>&#x202F;&#x003C;&#x202F;0.010, and &#x002A;<italic>p</italic>&#x202F;&#x003C;&#x202F;0.050.</p>
</caption>
<graphic xlink:href="fpsyg-16-1644393-g003.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Box plots illustrating abstractness ratings and reaction times. Panel (a) shows abstractness ratings for high, medium, and low conditions, and for soft and hard conditions. Significant differences are indicated by asterisks. Panel (b) displays reaction times in seconds for the same conditions.</alt-text>
</graphic>
</fig>
<p>The main effect of Tactile Rubbers and the interaction effect between Visual Words and Tactile Rubbers were not statistically significant [<italic>F</italic>(1, 23)&#x202F;=&#x202F;0.12, <italic>p</italic>&#x202F;=&#x202F;0.735, <italic>&#x03B7;<sub>p</sub><sup>2</sup></italic>&#x202F;=&#x202F;0.010 and <italic>F</italic>(2, 46)&#x202F;=&#x202F;0.76, <italic>p</italic>&#x202F;=&#x202F;0.474, <italic>&#x03B7;<sub>p</sub><sup>2</sup></italic>&#x202F;=&#x202F;0.030, respectively] (see <xref ref-type="fig" rid="fig3">Figure 3a</xref>, right). These results suggest that abstractness ratings are mostly influenced by the abstractness of the displayed words.</p>
</sec>
<sec id="sec18">
<label>3.2.2</label>
<title>Reaction times</title>
<p>The Shapiro&#x2013;Wilk test of normality was significant for five-out-of-six datasets [all <italic>W</italic>(24)&#x202F;&#x003C;&#x202F;0.883, all <italic>p</italic>&#x202F;&#x003C;&#x202F;0.009, while for one dataset it was not <italic>W</italic>(24)&#x202F;=&#x202F;0.937, <italic>p</italic>&#x202F;=&#x202F;0.143]. Therefore, we conducted non-parametric statistical tests on our data.</p>
<p>As in Experiment 1, for the main effects of Visual Words and Tactile Rubbers we averaged these variables. A Related-Samples Friedman test showed a lack of significant effect of the Visual Words [&#x03C7;<sup>2</sup>(2)&#x202F;=&#x202F;1.36, <italic>p</italic>&#x202F;=&#x202F;0.506], see <xref ref-type="fig" rid="fig3">Figure 3b</xref>, left. Then, a Related-Samples Wilcoxon Test indicated a non-significant effect for the Tactile Rubbers (<italic>Z</italic>&#x202F;=&#x202F;105.50, <italic>p</italic>&#x202F;=&#x202F;0.203, see <xref ref-type="fig" rid="fig3">Figure 3b</xref>, right). Finally, to investigate the interaction between Visual Words and Tactile Rubbers, for each visual condition (High/Medium/Low) we calculated the difference Soft minus Hard (see <xref ref-type="supplementary-material" rid="SM1">Supplementary Table S4</xref> for details) and ran a Related-Samples Friedman test, which was not significant [&#x03C7;<sup>2</sup>(2)&#x202F;=&#x202F;0.583, <italic>p</italic>&#x202F;=&#x202F;0.747].</p>
</sec>
</sec>
<sec id="sec19">
<label>3.3</label>
<title>Discussion</title>
<p>The aim of Experiment 2 was to better characterize the results of Experiment 1. We found that touching soft stimuli did <italic>not</italic> significantly affect the speed of abstractness ratings relative to words presented through vision. This suggests that the cross-modal interaction between touch and vision is task-sensitive (e.g., <xref ref-type="bibr" rid="ref30">Lacey and Sathian, 2011</xref>) and can be measured in tasks involving the affective component only (Experiment 1).</p>
</sec>
</sec>
<sec id="sec20">
<label>4</label>
<title>General discussion</title>
<p>Our study investigated the multisensory interaction of vision and touch within the affective domain. Initially we found that the presentation of tactile stimuli did not affect the valence ratings of visual words, yet soft tactile stimuli slowed the affective valence ratings for visually presented words, compared to hard tactile stimuli. Then we better characterized the above results in terms of task-specificity; touching soft stimuli slowed the rating of visually presented words <italic>only</italic> in affective ratings (and not in semantic ratings). This set of results suggests that touch affects the speed of affective visual processing.</p>
<p><xref ref-type="bibr" rid="ref42">Pecchinenda et al. (2014)</xref> reported an interaction between the valence of the words and the symmetricity of the dot clouds in terms of accuracy (but not in terms of reaction times). Yet, unlike them, we did not find an interaction between the valence of the words and the softness of the rubbers, but instead a <italic>main</italic> effect of softness in terms of reaction times. Our lack of interaction can be largely explained by the fact that their study was unisensory (vision) while ours was multisensory (vision and touch). Indeed, each sensory modality represents information in slightly different manners (<xref ref-type="bibr" rid="ref1">Behrmann and Ewell, 2003</xref>; <xref ref-type="bibr" rid="ref29">Lacey et al., 2007</xref>; <xref ref-type="bibr" rid="ref52">Whitaker et al., 2008</xref>), therefore the representation of visually perceived affective stimuli is partly different from tactile perceived affective stimuli, and thus in our case interaction in the valence evaluation was not observed. Nevertheless, through different sets of results, both studies reported the effect of affective stimuli on affective processing, but not on semantic processing.</p>
<p>Studies on <italic>visual</italic> priming (including Pecchinenda and colleagues), where a visual stimulus facilitates the processing of the following visual stimulus, reported that the type of judgement required of participants (semantic vs. affective) determined the presence/absence of the priming itself (<xref ref-type="bibr" rid="ref28">Klauer and Musch, 2002</xref>; <xref ref-type="bibr" rid="ref31">Lichtenstein-Vidne et al., 2012</xref>; <xref ref-type="bibr" rid="ref46">Spruyt et al., 2007</xref>; <xref ref-type="bibr" rid="ref49">Storbeck and Robinson, 2004</xref>). Thus, semantic priming would occur when stimuli undergo a semantic judgement (e.g., is it a verb or an adjective?), while affective priming would occur when the same stimuli undergo an affective judgement (e.g., is it good or bad?). Indeed, in our experiments we found an effect of the tactile stimuli <italic>only</italic> when participants were asked to perform the affective judgement of the visual stimuli. Therefore, in terms of task-specificity our results are in line with previous research involving the visual modality and extend our knowledge to the vision-tactile (multisensory) domain.</p>
<p>Although we found that touching soft stimuli slows the affective processing of visually presented words, and this result fits with the priming literature, we are aware that in our experiments tactile and visual stimuli were presented at the same time, thus representing a particular case of priming with no interstimulus interval. Additionally, it is not entirely clear whether soft tactile stimuli slowed the affective judgment of visual stimuli or whether <italic>hard</italic> tactile stimuli <italic>speeded</italic> the affective judgment of visual stimuli. Yet, our previous research found that tactile stimulation of softer objects felt more pleasant (<xref ref-type="bibr" rid="ref26">Kitada et al., 2021</xref>; <xref ref-type="bibr" rid="ref40">Pasqualotto et al., 2020</xref>), thus supporting the interpretation that, probably through an attentional mechanism, soft tactile stimuli slowed the rating of visual words. The modulation of attention by soft-and-pleasant stimuli is also supported by <xref ref-type="bibr" rid="ref22">Kawamichi et al. (2015)</xref>, who reported that touching a friend&#x2019;s hand (relative to a non-embodied rubber hand, <xref ref-type="bibr" rid="ref8">Fahey et al., 2019</xref>) reduced the unpleasantness of aversive visual stimuli and reduced visual cortex activity. This suggests that the more pleasant (&#x201C;comfortable&#x201D;) tactile stimulation swayed the attention from the visual stimuli. Yet, although speculative, both processes might be at play, with soft objects slowing the affective judgment of visual stimuli via the above-mentioned attentional mechanism and hard objects speeding their judgement.</p>
<p>We found that the link between touch and vision was limited to the affective processing, thus task-sensitive. This is a rather established finding (<xref ref-type="bibr" rid="ref14">Ghazanfar and Schroeder, 2006</xref>; <xref ref-type="bibr" rid="ref37">Pascual-Leone and Hamilton, 2001</xref>), where &#x201C;unimodal&#x201D; and &#x201C;associative&#x201D; brain areas cooperate to accomplish the same task. Likewise, we found that an affective task carried out by vision was influenced by a tactile input eliciting pleasantness. Future studies should address how the present results could extend to other stimuli, such as textures (<xref ref-type="bibr" rid="ref43">Roberts et al., 2024</xref>), and affect stimuli selection (<xref ref-type="bibr" rid="ref50">Streicher and Estes, 2016</xref>).</p>
<p>In conclusion, although the generalization of our results should be tested using images rather than words (<xref ref-type="bibr" rid="ref3">Bradley and Lang, 2007</xref>), our study for the first time investigated the effect of tactile input on the affective judgment of visual stimuli and found that softness slows judgment, and that this cross-modal effect is task-specific.</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="sec21">
<title>Data availability statement</title>
<p>Data are available at: <ext-link xlink:href="https://osf.io/86der/overview" ext-link-type="uri">https://osf.io/86der/overview</ext-link>.</p>
</sec>
<sec sec-type="ethics-statement" id="sec22">
<title>Ethics statement</title>
<p>The studies involving humans were approved by the Institutional Review Board (IRB-2018-07-013) at Nanyang Technological University. The studies were conducted in accordance with the local legislation and institutional requirements. The participants provided their written informed consent to participate in this study.</p>
</sec>
<sec sec-type="author-contributions" id="sec23">
<title>Author contributions</title>
<p>AP: Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing. UL: Writing &#x2013; review &#x0026; editing. RK: Writing &#x2013; review &#x0026; editing.</p>
</sec>
<sec sec-type="COI-statement" id="sec25">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="ai-statement" id="sec26">
<title>Generative AI statement</title>
<p>The authors declare that no Gen AI was used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec sec-type="disclaimer" id="sec27">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec sec-type="supplementary-material" id="sec28">
<title>Supplementary material</title>
<p>The Supplementary material for this article can be found online at: <ext-link xlink:href="https://www.frontiersin.org/articles/10.3389/fpsyg.2025.1644393/full#supplementary-material" ext-link-type="uri">https://www.frontiersin.org/articles/10.3389/fpsyg.2025.1644393/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Supplementary_file_1.docx" id="SM1" mimetype="application/vnd.openxmlformats-officedocument.wordprocessingml.document" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<ref-list>
<title>References</title>
<ref id="ref1"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Behrmann</surname><given-names>M.</given-names></name> <name><surname>Ewell</surname><given-names>C.</given-names></name></person-group> (<year>2003</year>). <article-title>Expertise in tactile pattern recognition</article-title>. <source>Psychol. Sci.</source> <volume>14</volume>, <fpage>480</fpage>&#x2013;<lpage>492</lpage>. doi: <pub-id pub-id-type="doi">10.1111/1467-9280.02458</pub-id>, PMID: <pub-id pub-id-type="pmid">12930480</pub-id></mixed-citation></ref>
<ref id="ref2"><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Bradley</surname><given-names>M. M.</given-names></name> <name><surname>Lang</surname><given-names>P. J.</given-names></name></person-group> (<year>1999</year>) Affective Norms for English Words (ANEW): Instruction manual and affective ratings (Vol. 30, No. 1, pp. 25&#x2013;36). Technical Report C-1, University of Florida, Center for Research in Psychophysiology. Available online at: <ext-link xlink:href="http://www.scribd.com/doc/42601042/Affective-Norms-for-English-Words" ext-link-type="uri">http://www.scribd.com/doc/42601042/Affective-Norms-for-English-Words</ext-link></mixed-citation></ref>
<ref id="ref3"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Bradley</surname><given-names>M. M.</given-names></name> <name><surname>Lang</surname><given-names>P. J.</given-names></name></person-group> (<year>2007</year>). &#x201C;<article-title>The international affective picture system (IAPS) in the study of emotion and attention</article-title>&#x201D; in <source>Handbook of emotion elicitation and assessment</source>. eds. <person-group person-group-type="editor"><name><surname>Coan</surname><given-names>J. A.</given-names></name> <name><surname>Allen</surname><given-names>J. J. B.</given-names></name></person-group> (<publisher-loc>Oxford</publisher-loc>: <publisher-name>Oxford University Press</publisher-name>), <fpage>29</fpage>&#x2013;<lpage>46</lpage>.</mixed-citation></ref>
<ref id="ref4"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Cattaneo</surname><given-names>Z.</given-names></name> <name><surname>Vecchi</surname><given-names>T.</given-names></name></person-group> (<year>2008</year>). <article-title>Supramodality effects in visual and haptic spatial processes</article-title>. <source>J. Exp. Psychol. Learn. Mem. Cogn.</source> <volume>34</volume>, <fpage>631</fpage>&#x2013;<lpage>642</lpage>. doi: <pub-id pub-id-type="doi">10.1037/0278-7393.34.3.631</pub-id>, PMID: <pub-id pub-id-type="pmid">18444761</pub-id></mixed-citation></ref>
<ref id="ref5"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Cavdan</surname><given-names>M.</given-names></name> <name><surname>Celebi</surname><given-names>B.</given-names></name> <name><surname>Drewing</surname><given-names>K.</given-names></name></person-group> (<year>2023</year>). <article-title>Simultaneous emotional stimuli prolong the timing of vibrotactile events</article-title>. <source>IEEE Trans. Haptics</source> <volume>16</volume>, <fpage>622</fpage>&#x2013;<lpage>627</lpage>. doi: <pub-id pub-id-type="doi">10.1109/TOH.2023.3275190</pub-id>, PMID: <pub-id pub-id-type="pmid">37186525</pub-id></mixed-citation></ref>
<ref id="ref6"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Della Longa</surname><given-names>L.</given-names></name> <name><surname>Sacchetti</surname><given-names>S.</given-names></name> <name><surname>Farroni</surname><given-names>T.</given-names></name> <name><surname>McGlone</surname><given-names>F.</given-names></name></person-group> (<year>2022</year>). <article-title>Does nice or nasty matter? The intensity of touch modulates the rubber hand illusion</article-title>. <source>Front. Psychol.</source> <volume>13</volume>:<fpage>901413</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2022.901413</pub-id>, PMID: <pub-id pub-id-type="pmid">35769756</pub-id></mixed-citation></ref>
<ref id="ref7"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Drewing</surname><given-names>K.</given-names></name> <name><surname>Weyel</surname><given-names>C.</given-names></name> <name><surname>Celebi</surname><given-names>H.</given-names></name> <name><surname>Kaya</surname><given-names>D.</given-names></name></person-group> (<year>2018</year>). <article-title>Systematic relations between affective and sensory material dimensions in touch</article-title>. <source>IEEE Trans. Haptics</source> <volume>11</volume>, <fpage>611</fpage>&#x2013;<lpage>622</lpage>. doi: <pub-id pub-id-type="doi">10.1109/TOH.2018.2836427</pub-id>, PMID: <pub-id pub-id-type="pmid">29994318</pub-id></mixed-citation></ref>
<ref id="ref8"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Fahey</surname><given-names>S.</given-names></name> <name><surname>Santana</surname><given-names>C.</given-names></name> <name><surname>Kitada</surname><given-names>R.</given-names></name> <name><surname>Zheng</surname><given-names>Z.</given-names></name></person-group> (<year>2019</year>). <article-title>Affective judgement of social touch on a hand associated with hand embodiment</article-title>. <source>Q. J. Exp. Psychol. (Hove)</source> <volume>72</volume>, <fpage>2408</fpage>&#x2013;<lpage>2422</lpage>. doi: <pub-id pub-id-type="doi">10.1177/1747021819842785</pub-id>, PMID: <pub-id pub-id-type="pmid">30895891</pub-id></mixed-citation></ref>
<ref id="ref9"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Fazio</surname><given-names>R. H.</given-names></name></person-group> (<year>2001</year>). <article-title>On the automatic activation of associated evaluations: an overview</article-title>. <source>Cogn. Emot.</source> <volume>15</volume>, <fpage>115</fpage>&#x2013;<lpage>141</lpage>. doi: <pub-id pub-id-type="doi">10.1080/0269993004200024</pub-id></mixed-citation></ref>
<ref id="ref10"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Fazio</surname><given-names>R.</given-names></name> <name><surname>Dunham</surname><given-names>K. J.</given-names></name> <name><surname>Griswold</surname><given-names>S.</given-names></name> <name><surname>Denney</surname><given-names>R. L.</given-names></name></person-group> (<year>2013</year>). <article-title>An improved measure of handedness: the fazio laterality inventory</article-title>. <source>Appl. Neuropsychol.</source> <volume>20</volume>, <fpage>197</fpage>&#x2013;<lpage>202</lpage>. doi: <pub-id pub-id-type="doi">10.1080/09084282.2012.684115</pub-id>, PMID: <pub-id pub-id-type="pmid">23406292</pub-id></mixed-citation></ref>
<ref id="ref11"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Field</surname><given-names>A.</given-names></name></person-group> (<year>2009</year>). <source>Discovering statistics using SPSS</source>. <edition>3rd</edition> Edn.<publisher-loc>Thousand Oaks</publisher-loc>: <publisher-name>Sage Publications</publisher-name>, <fpage>575</fpage>.</mixed-citation></ref>
<ref id="ref12"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Filippetti</surname><given-names>M. L.</given-names></name> <name><surname>Kirsch</surname><given-names>L. P.</given-names></name> <name><surname>Crucianelli</surname><given-names>L.</given-names></name> <name><surname>Fotopoulou</surname><given-names>A.</given-names></name></person-group> (<year>2019</year>). <article-title>Affective certainty and congruency of touch modulate the experience of the rubber hand illusion</article-title>. <source>Sci. Rep.</source> <volume>9</volume>:<fpage>2635</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-019-38880-5</pub-id>, PMID: <pub-id pub-id-type="pmid">30796333</pub-id></mixed-citation></ref>
<ref id="ref13"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Friedenberg</surname><given-names>J.</given-names></name> <name><surname>Bertamini</surname><given-names>M.</given-names></name></person-group> (<year>2015</year>). <article-title>Aesthetic preference for polygon shape</article-title>. <source>Empir. Stud. Arts</source> <volume>33</volume>, <fpage>144</fpage>&#x2013;<lpage>160</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0276237415594708</pub-id></mixed-citation></ref>
<ref id="ref14"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ghazanfar</surname><given-names>A. A.</given-names></name> <name><surname>Schroeder</surname><given-names>C. E.</given-names></name></person-group> (<year>2006</year>). <article-title>Is neocortex essentially multisensory?</article-title> <source>Trends Cogn. Sci.</source> <volume>10</volume>, <fpage>278</fpage>&#x2013;<lpage>285</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2006.04.008</pub-id>, PMID: <pub-id pub-id-type="pmid">16713325</pub-id></mixed-citation></ref>
<ref id="ref15"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Guest</surname><given-names>S.</given-names></name> <name><surname>Dessirier</surname><given-names>J. M.</given-names></name> <name><surname>Mehrabyan</surname><given-names>A.</given-names></name> <name><surname>McGlone</surname><given-names>F.</given-names></name> <name><surname>Essick</surname><given-names>G.</given-names></name> <name><surname>Gescheider</surname><given-names>G.</given-names></name> <etal/></person-group>. (<year>2011</year>). <article-title>The development and validation of sensory and emotional scales of touch perception</article-title>. <source>Atten. Percept. Psychophys.</source> <volume>73</volume>, <fpage>531</fpage>&#x2013;<lpage>550</lpage>. doi: <pub-id pub-id-type="doi">10.3758/s13414-010-0037-y</pub-id>, PMID: <pub-id pub-id-type="pmid">21264727</pub-id></mixed-citation></ref>
<ref id="ref16"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Gupta</surname><given-names>R.</given-names></name></person-group> (<year>2019</year>). <article-title>Positive emotions have a unique capacity to capture attention</article-title>. <source>Prog. Brain Res.</source> <volume>247</volume>, <fpage>23</fpage>&#x2013;<lpage>46</lpage>. doi: <pub-id pub-id-type="doi">10.1016/bs.pbr.2019.02.001</pub-id></mixed-citation></ref>
<ref id="ref17"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Hermans</surname><given-names>D.</given-names></name> <name><surname>De Houwer</surname><given-names>J.</given-names></name> <name><surname>Eelen</surname><given-names>P.</given-names></name></person-group> (<year>2001</year>). <article-title>A time course analysis of the affective priming effect</article-title>. <source>Cogn. Emot.</source> <volume>15</volume>, <fpage>143</fpage>&#x2013;<lpage>165</lpage>. doi: <pub-id pub-id-type="doi">10.1080/0269993004200033</pub-id></mixed-citation></ref>
<ref id="ref18"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Hermans</surname><given-names>D.</given-names></name> <name><surname>Houwer</surname><given-names>J. D.</given-names></name> <name><surname>Eelen</surname><given-names>P.</given-names></name></person-group> (<year>1994</year>). <article-title>The affective priming effect: automatic activation of evaluative information in memory</article-title>. <source>Cogn. Emot.</source> <volume>8</volume>, <fpage>515</fpage>&#x2013;<lpage>533</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699939408408957</pub-id></mixed-citation></ref>
<ref id="ref19"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kaneno</surname><given-names>Y.</given-names></name> <name><surname>Pasqualotto</surname><given-names>A.</given-names></name> <name><surname>Ashida</surname><given-names>H.</given-names></name></person-group> (<year>2024</year>). <article-title>Influence of interoception and body movement on the rubber hand illusion</article-title>. <source>Front. Psychol.</source> <volume>15</volume>:<fpage>1458726</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2024.1458726</pub-id>, PMID: <pub-id pub-id-type="pmid">39723393</pub-id></mixed-citation></ref>
<ref id="ref20"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kappes</surname><given-names>C.</given-names></name> <name><surname>Bermeitinger</surname><given-names>C.</given-names></name></person-group> (<year>2016</year>). <article-title>The emotional stroop as an emotion regulation task</article-title>. <source>Exp. Aging Res.</source> <volume>42</volume>, <fpage>161</fpage>&#x2013;<lpage>194</lpage>. doi: <pub-id pub-id-type="doi">10.1080/0361073X.2016.1132890</pub-id>, PMID: <pub-id pub-id-type="pmid">26890634</pub-id></mixed-citation></ref>
<ref id="ref21"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kauschke</surname><given-names>C.</given-names></name> <name><surname>Bahn</surname><given-names>D.</given-names></name> <name><surname>Vesker</surname><given-names>M.</given-names></name> <name><surname>Schwarzer</surname><given-names>G.</given-names></name></person-group> (<year>2019</year>). <article-title>The role of emotional valence for the processing of facial and verbal stimuli&#x2014;positivity or negativity bias?</article-title> <source>Front. Psychol.</source> <volume>10</volume>:<fpage>1654</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2019.01654</pub-id>, PMID: <pub-id pub-id-type="pmid">31402884</pub-id></mixed-citation></ref>
<ref id="ref22"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kawamichi</surname><given-names>H.</given-names></name> <name><surname>Kitada</surname><given-names>R.</given-names></name> <name><surname>Yoshihara</surname><given-names>K.</given-names></name> <name><surname>Takahashi</surname><given-names>H. K.</given-names></name> <name><surname>Sadato</surname><given-names>N.</given-names></name></person-group> (<year>2015</year>). <article-title>Interpersonal touch suppresses visual processing of aversive stimuli</article-title>. <source>Front. Hum. Neurosci.</source> <volume>9</volume>:<fpage>164</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnhum.2015.00164</pub-id>, PMID: <pub-id pub-id-type="pmid">25904856</pub-id></mixed-citation></ref>
<ref id="ref23"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kever</surname><given-names>A.</given-names></name> <name><surname>Grynberg</surname><given-names>D.</given-names></name> <name><surname>Vermeulen</surname><given-names>N.</given-names></name></person-group> (<year>2017</year>). <article-title>Congruent bodily arousal promotes the constructive recognition of emotional words</article-title>. <source>Conscious. Cogn.</source> <volume>53</volume>, <fpage>81</fpage>&#x2013;<lpage>88</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.concog.2017.06.007</pub-id>, PMID: <pub-id pub-id-type="pmid">28646661</pub-id></mixed-citation></ref>
<ref id="ref24"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kirsch</surname><given-names>L. P.</given-names></name> <name><surname>Besharati</surname><given-names>S.</given-names></name> <name><surname>Papadaki</surname><given-names>C.</given-names></name> <name><surname>Crucianelli</surname><given-names>L.</given-names></name> <name><surname>Bertagnoli</surname><given-names>S.</given-names></name> <name><surname>Ward</surname><given-names>N.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>Damage to the right insula disrupts the perception of affective touch</article-title>. <source>eLife</source> <volume>9</volume>:<fpage>e47895</fpage>. doi: <pub-id pub-id-type="doi">10.7554/eLife.47895</pub-id>, PMID: <pub-id pub-id-type="pmid">31975686</pub-id></mixed-citation></ref>
<ref id="ref25"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kitada</surname><given-names>R.</given-names></name> <name><surname>Doizaki</surname><given-names>R.</given-names></name> <name><surname>Kwon</surname><given-names>J.</given-names></name> <name><surname>Tanigawa</surname><given-names>T.</given-names></name> <name><surname>Nakagawa</surname><given-names>E.</given-names></name> <name><surname>Kochiyama</surname><given-names>T.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Brain networks underlying tactile softness perception: a functional magnetic resonance imaging study</article-title>. <source>NeuroImage</source> <volume>197</volume>, <fpage>156</fpage>&#x2013;<lpage>166</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroimage.2019.04.044</pub-id>, PMID: <pub-id pub-id-type="pmid">31029866</pub-id></mixed-citation></ref>
<ref id="ref26"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kitada</surname><given-names>R.</given-names></name> <name><surname>Ng</surname><given-names>M.</given-names></name> <name><surname>Tan</surname><given-names>Z. Y.</given-names></name> <name><surname>Lee</surname><given-names>X. E.</given-names></name> <name><surname>Kochiyama</surname><given-names>T.</given-names></name></person-group> (<year>2021</year>). <article-title>Physical correlates of human-like softness elicit high tactile pleasantness</article-title>. <source>Sci. Rep.</source> <volume>11</volume>:<fpage>16510</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-021-96044-w</pub-id>, PMID: <pub-id pub-id-type="pmid">34389767</pub-id></mixed-citation></ref>
<ref id="ref27"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Klatzky</surname><given-names>R. L.</given-names></name> <name><surname>Lederman</surname><given-names>S. J.</given-names></name></person-group> (<year>2011</year>). <article-title>Haptic object perception: spatial dimensionality and relation to vision</article-title>. <source>Philos. Trans. R. Soc. Lond. Ser. B Biol. Sci.</source> <volume>366</volume>, <fpage>3097</fpage>&#x2013;<lpage>3105</lpage>. doi: <pub-id pub-id-type="doi">10.1098/rstb.2011.0153</pub-id>, PMID: <pub-id pub-id-type="pmid">21969691</pub-id></mixed-citation></ref>
<ref id="ref28"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Klauer</surname><given-names>K. C.</given-names></name> <name><surname>Musch</surname><given-names>J.</given-names></name></person-group> (<year>2002</year>). <article-title>Goal-dependent and goal-in-dependent effects of irrelevant evaluations</article-title>. <source>Personal. Soc. Psychol. Bull.</source> <volume>28</volume>, <fpage>802</fpage>&#x2013;<lpage>814</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0146167202289009</pub-id></mixed-citation></ref>
<ref id="ref29"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Lacey</surname><given-names>S.</given-names></name> <name><surname>Campbell</surname><given-names>C.</given-names></name> <name><surname>Sathian</surname><given-names>K.</given-names></name></person-group> (<year>2007</year>). <article-title>Vision and touch: multiple or multisensory representations of objects?</article-title> <source>Perception</source> <volume>36</volume>, <fpage>1513</fpage>&#x2013;<lpage>1521</lpage>. doi: <pub-id pub-id-type="doi">10.1068/p5850</pub-id>, PMID: <pub-id pub-id-type="pmid">18265834</pub-id></mixed-citation></ref>
<ref id="ref30"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Lacey</surname><given-names>S.</given-names></name> <name><surname>Sathian</surname><given-names>K.</given-names></name></person-group> (<year>2011</year>). <article-title>Multisensory object representation: insights from studies of vision and touch</article-title>. <source>Prog. Brain Res.</source> <volume>191</volume>, <fpage>165</fpage>&#x2013;<lpage>176</lpage>. doi: <pub-id pub-id-type="doi">10.1016/B978-0-444-53752-2.00006-0</pub-id></mixed-citation></ref>
<ref id="ref31"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Lichtenstein-Vidne</surname><given-names>L.</given-names></name> <name><surname>Henik</surname><given-names>A.</given-names></name> <name><surname>Safadi</surname><given-names>Z.</given-names></name></person-group> (<year>2012</year>). <article-title>Task relevance modulates processing of distracting emotional stimuli</article-title>. <source>Cogn. Emot.</source> <volume>26</volume>, <fpage>42</fpage>&#x2013;<lpage>52</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699931.2011.567055</pub-id>, PMID: <pub-id pub-id-type="pmid">21598126</pub-id></mixed-citation></ref>
<ref id="ref32"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname><given-names>T.</given-names></name> <name><surname>Liu</surname><given-names>X.</given-names></name> <name><surname>Xiao</surname><given-names>T.</given-names></name> <name><surname>Shi</surname><given-names>J.</given-names></name></person-group> (<year>2016</year>). <article-title>Human recognition memory and conflict control: an event-related potential study</article-title>. <source>Neuroscience</source> <volume>313</volume>, <fpage>83</fpage>&#x2013;<lpage>91</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroscience.2015.11.047</pub-id>, PMID: <pub-id pub-id-type="pmid">26633266</pub-id></mixed-citation></ref>
<ref id="ref33"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Luck</surname><given-names>S. J.</given-names></name> <name><surname>Gaspelin</surname><given-names>N.</given-names></name> <name><surname>Folk</surname><given-names>C. L.</given-names></name> <name><surname>Remington</surname><given-names>R. W.</given-names></name> <name><surname>Theeuwes</surname><given-names>J.</given-names></name></person-group> (<year>2021</year>). <article-title>Progress toward resolving the attentional capture debate</article-title>. <source>Vis. Cogn.</source> <volume>29</volume>, <fpage>1</fpage>&#x2013;<lpage>21</lpage>. doi: <pub-id pub-id-type="doi">10.1080/13506285.2020.1848949</pub-id>, PMID: <pub-id pub-id-type="pmid">33574729</pub-id></mixed-citation></ref>
<ref id="ref34"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Martinovic</surname><given-names>J.</given-names></name> <name><surname>Lawson</surname><given-names>R.</given-names></name> <name><surname>Craddock</surname><given-names>M.</given-names></name></person-group> (<year>2012</year>). <article-title>Time course of information processing in visual and haptic object classification</article-title>. <source>Front. Hum. Neurosci.</source> <volume>6</volume>:<fpage>49</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnhum.2012.00049</pub-id>, PMID: <pub-id pub-id-type="pmid">22470327</pub-id></mixed-citation></ref>
<ref id="ref35"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Morrison</surname><given-names>I.</given-names></name></person-group> (<year>2016</year>). <article-title>ALE meta-analysis reveals dissociable networks for affective and discriminative aspects of touch</article-title>. <source>Hum. Brain Mapp.</source> <volume>37</volume>, <fpage>1308</fpage>&#x2013;<lpage>1320</lpage>. doi: <pub-id pub-id-type="doi">10.1002/hbm.23103</pub-id>, PMID: <pub-id pub-id-type="pmid">26873519</pub-id></mixed-citation></ref>
<ref id="ref36"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Newell</surname><given-names>F. N.</given-names></name> <name><surname>Ernst</surname><given-names>M. O.</given-names></name> <name><surname>Tjan</surname><given-names>B. S.</given-names></name> <name><surname>B&#x00FC;lthoff</surname><given-names>H. H.</given-names></name></person-group> (<year>2001</year>). <article-title>Viewpoint dependence in visual and haptic object recognition</article-title>. <source>Psychol. Sci.</source> <volume>12</volume>, <fpage>37</fpage>&#x2013;<lpage>42</lpage>. doi: <pub-id pub-id-type="doi">10.1111/1467-9280.00307</pub-id>, PMID: <pub-id pub-id-type="pmid">11294226</pub-id></mixed-citation></ref>
<ref id="ref37"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pascual-Leone</surname><given-names>A.</given-names></name> <name><surname>Hamilton</surname><given-names>R. H.</given-names></name></person-group> (<year>2001</year>). <article-title>The metamodal organization of the brain</article-title>. <source>Prog. Brain Res.</source> <volume>134</volume>, <fpage>427</fpage>&#x2013;<lpage>445</lpage>. doi: <pub-id pub-id-type="doi">10.1016/s0079-6123(01)34028-1</pub-id></mixed-citation></ref>
<ref id="ref38"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pasqualotto</surname><given-names>A.</given-names></name> <name><surname>Dumitru</surname><given-names>M. L.</given-names></name> <name><surname>Myachykov</surname><given-names>A.</given-names></name></person-group> (<year>2016</year>). <article-title>Multisensory integration: brain, body, and world</article-title>. <source>Front. Psychol.</source> <volume>6</volume>:<fpage>2046</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2015.02046</pub-id></mixed-citation></ref>
<ref id="ref39"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pasqualotto</surname><given-names>A.</given-names></name> <name><surname>Finucane</surname><given-names>C. M.</given-names></name> <name><surname>Newell</surname><given-names>F. N.</given-names></name></person-group> (<year>2013</year>). <article-title>Ambient visual information confers a context-specific, long-term benefit on memory for haptic scenes</article-title>. <source>Cognition</source> <volume>128</volume>, <fpage>363</fpage>&#x2013;<lpage>379</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cognition.2013.04.011</pub-id>, PMID: <pub-id pub-id-type="pmid">23764999</pub-id></mixed-citation></ref>
<ref id="ref40"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pasqualotto</surname><given-names>A.</given-names></name> <name><surname>Ng</surname><given-names>M.</given-names></name> <name><surname>Tan</surname><given-names>Z. Y.</given-names></name> <name><surname>Kitada</surname><given-names>R.</given-names></name></person-group> (<year>2020</year>). <article-title>Tactile perception of pleasantness in relation to perceived softness</article-title>. <source>Sci. Rep.</source> <volume>10</volume>:<fpage>11189</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-020-68034-x</pub-id>, PMID: <pub-id pub-id-type="pmid">32636415</pub-id></mixed-citation></ref>
<ref id="ref41"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pasqualotto</surname><given-names>A.</given-names></name> <name><surname>Proulx</surname><given-names>M. J.</given-names></name></person-group> (<year>2015</year>). <article-title>Two-dimensional rubber-hand illusion: the Dorian Gray hand illusion</article-title>. <source>Multisens. Res.</source> <volume>28</volume>, <fpage>101</fpage>&#x2013;<lpage>110</lpage>. doi: <pub-id pub-id-type="doi">10.1163/22134808-00002473</pub-id>, PMID: <pub-id pub-id-type="pmid">26152054</pub-id></mixed-citation></ref>
<ref id="ref42"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pecchinenda</surname><given-names>A.</given-names></name> <name><surname>Bertamini</surname><given-names>M.</given-names></name> <name><surname>Makin</surname><given-names>A. D. J.</given-names></name> <name><surname>Ruta</surname><given-names>N.</given-names></name></person-group> (<year>2014</year>). <article-title>The pleasantness of visual symmetry: always, never or sometimes</article-title>. <source>PLoS One</source> <volume>9</volume>:<fpage>e92685</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0092685</pub-id>, PMID: <pub-id pub-id-type="pmid">24658112</pub-id></mixed-citation></ref>
<ref id="ref43"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Roberts</surname><given-names>R. D.</given-names></name> <name><surname>Li</surname><given-names>M.</given-names></name> <name><surname>Allen</surname><given-names>H. A.</given-names></name></person-group> (<year>2024</year>). <article-title>Visual effects on tactile texture perception</article-title>. <source>Sci. Rep.</source> <volume>14</volume>:<fpage>632</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-023-50596-1</pub-id>, PMID: <pub-id pub-id-type="pmid">38182637</pub-id></mixed-citation></ref>
<ref id="ref44"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname><given-names>C.</given-names></name></person-group> (<year>2022</year>). <article-title>Multisensory contributions to affective touch</article-title>. <source>Curr. Opin. Behav. Sci.</source> <volume>43</volume>, <fpage>40</fpage>&#x2013;<lpage>45</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cobeha.2021.08.003</pub-id></mixed-citation></ref>
<ref id="ref45"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname><given-names>C.</given-names></name> <name><surname>Gallace</surname><given-names>A.</given-names></name></person-group> (<year>2011</year>). <article-title>Multisensory design: reaching out to touch the consumer</article-title>. <source>Psychol. Mark.</source> <volume>28</volume>, <fpage>267</fpage>&#x2013;<lpage>308</lpage>. doi: <pub-id pub-id-type="doi">10.1002/mar.20392</pub-id></mixed-citation></ref>
<ref id="ref46"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spruyt</surname><given-names>A.</given-names></name> <name><surname>De Houwer</surname><given-names>J.</given-names></name> <name><surname>Hermans</surname><given-names>D.</given-names></name> <name><surname>Eelen</surname><given-names>P.</given-names></name></person-group> (<year>2007</year>). <article-title>Affective priming of nonaffective semantic categorization responses</article-title>. <source>Exp. Psychol.</source> <volume>54</volume>, <fpage>44</fpage>&#x2013;<lpage>53</lpage>. doi: <pub-id pub-id-type="doi">10.1027/1618-3169.54.1.44</pub-id>, PMID: <pub-id pub-id-type="pmid">17341014</pub-id></mixed-citation></ref>
<ref id="ref47"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Stein</surname><given-names>B. E.</given-names></name></person-group> (<year>2012</year>). <source>The new handbook of multisensory processing</source>. <publisher-loc>Cambridge</publisher-loc>: <publisher-name>MIT Press</publisher-name>.</mixed-citation></ref>
<ref id="ref48"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Stenberg</surname><given-names>G.</given-names></name> <name><surname>Wiking</surname><given-names>S.</given-names></name> <name><surname>Dahl</surname><given-names>M.</given-names></name></person-group> (<year>1998</year>). <article-title>Judging words at face value: interference in a word processing task reveals automatic processing of affective facial expressions</article-title>. <source>Cogn. Emot.</source> <volume>12</volume>, <fpage>755</fpage>&#x2013;<lpage>782</lpage>. doi: <pub-id pub-id-type="doi">10.1080/026999398379420</pub-id></mixed-citation></ref>
<ref id="ref49"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Storbeck</surname><given-names>J.</given-names></name> <name><surname>Robinson</surname><given-names>M. D.</given-names></name></person-group> (<year>2004</year>). <article-title>Preferences and inferences in encoding visual objects: a systematic comparison of semantic and affective priming</article-title>. <source>Personal. Soc. Psychol. Bull.</source> <volume>30</volume>, <fpage>81</fpage>&#x2013;<lpage>93</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0146167203258855</pub-id>, PMID: <pub-id pub-id-type="pmid">15030645</pub-id></mixed-citation></ref>
<ref id="ref50"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Streicher</surname><given-names>M.</given-names></name> <name><surname>Estes</surname><given-names>Z.</given-names></name></person-group> (<year>2016</year>). <article-title>Multisensory interaction in product choice: grasping a product affects choice of other seen products</article-title>. <source>J. Consum. Psychol.</source> <volume>26</volume>, <fpage>558</fpage>&#x2013;<lpage>565</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jcps.2016.01.001</pub-id></mixed-citation></ref>
<ref id="ref51"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Suzuki</surname><given-names>M.</given-names></name> <name><surname>Gyoba</surname><given-names>J.</given-names></name></person-group> (<year>2008</year>). <article-title>Visual and tactile cross-modal mere exposure effects</article-title>. <source>Cogn. Emot.</source> <volume>22</volume>, <fpage>147</fpage>&#x2013;<lpage>154</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699930701298382</pub-id></mixed-citation></ref>
<ref id="ref52"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Whitaker</surname><given-names>T. A.</given-names></name> <name><surname>Sim&#x00F5;es-Franklin</surname><given-names>C.</given-names></name> <name><surname>Newell</surname><given-names>F. N.</given-names></name></person-group> (<year>2008</year>). <article-title>Vision and touch: independent or integrated systems for the perception of texture?</article-title> <source>Brain Res.</source> <volume>1242</volume>, <fpage>59</fpage>&#x2013;<lpage>72</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.brainres.2008.05.037</pub-id>, PMID: <pub-id pub-id-type="pmid">18585689</pub-id></mixed-citation></ref>
<ref id="ref53"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Woods</surname><given-names>A. T.</given-names></name> <name><surname>O&#x2019;Modhrain</surname><given-names>S.</given-names></name> <name><surname>Newell</surname><given-names>F. N.</given-names></name></person-group> (<year>2004</year>). <article-title>The effect of temporal delay and spatial differences on cross-modal object recognition</article-title>. <source>Cogn. Affect. Behav. Neurosci.</source> <volume>4</volume>, <fpage>260</fpage>&#x2013;<lpage>269</lpage>. doi: <pub-id pub-id-type="doi">10.3758/CABN.4.2.260</pub-id></mixed-citation></ref>
<ref id="ref54"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Wu</surname><given-names>D.</given-names></name> <name><surname>Wu</surname><given-names>T. I.</given-names></name> <name><surname>Singh</surname><given-names>H.</given-names></name> <name><surname>Padilla</surname><given-names>S.</given-names></name> <name><surname>Atkinson</surname><given-names>D.</given-names></name> <name><surname>Bianchi-Berthouze</surname><given-names>N.</given-names></name> <etal/></person-group>. (<year>2011</year>). <article-title>The affective experience of handling digital fabrics: tactile and visual cross-modal effects</article-title>. <source>Lect. Notes Comput. Sci.</source> <volume>6974</volume>, <fpage>427</fpage>&#x2013;<lpage>436</lpage>. doi: <pub-id pub-id-type="doi">10.1007/978-3-642-24600-5_46</pub-id></mixed-citation></ref>
<ref id="ref55"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Zajonc</surname><given-names>R. B.</given-names></name></person-group> (<year>1968</year>). <article-title>Attitudinal effects of mere exposure</article-title>. <source>J. Pers. Soc. Psychol.</source> <volume>9</volume>, <fpage>1</fpage>&#x2013;<lpage>27</lpage>. doi: <pub-id pub-id-type="doi">10.1037/h0025848</pub-id></mixed-citation></ref>
<ref id="ref56"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Zuidhoek</surname><given-names>S.</given-names></name> <name><surname>Visser</surname><given-names>A.</given-names></name> <name><surname>Bredero</surname><given-names>M. E.</given-names></name> <name><surname>Postma</surname><given-names>A.</given-names></name></person-group> (<year>2004</year>). <article-title>Multisensory integration mechanisms in haptic space perception</article-title>. <source>Exp. Brain Res.</source> <volume>157</volume>, <fpage>265</fpage>&#x2013;<lpage>268</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00221-004-1938-6</pub-id>, PMID: <pub-id pub-id-type="pmid">15197527</pub-id></mixed-citation></ref>
</ref-list>
<fn-group>
<fn id="fn0001" fn-type="custom" custom-type="edited-by"><p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/181506/overview">Vincenza Tarantino</ext-link>, University of Palermo, Italy</p></fn>
<fn id="fn0002" fn-type="custom" custom-type="reviewed-by"><p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/37355/overview">Fernando Marmolejo-Ramos</ext-link>, Flinders University, Australia;</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2563568/overview">M&#x00FC;ge Cavdan</ext-link>, University of Giessen, Germany</p></fn>
</fn-group>
</back>
</article>