<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" dtd-version="1.3" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Psychol.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Psychology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Psychol.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1664-1078</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpsyg.2026.1733841</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Original Research</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Exploring the link between synesthesia and lucid dreaming through perceptual presence</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name><surname>Matsuda</surname> <given-names>Eiko</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<uri xlink:href="https://loop.frontiersin.org/people/3224764"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Matsuda</surname> <given-names>Eiko</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x00026; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<uri xlink:href="https://loop.frontiersin.org/people/3293144"/>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>Graduate School of Science and Technology, Keio University</institution>, <city>Yokohama</city>, <country country="JP">Japan</country></aff>
<aff id="aff2"><label>2</label><institution>Department of Social Psychology, Faculty of Sociology, Toyo University</institution>, <city>Tokyo</city>, <country country="JP">Japan</country></aff>
<author-notes>
<corresp id="c001"><label>&#x0002A;</label>Correspondence: Eiko Matsuda, <email xlink:href="mailto:eiko@sd.keio.ac.jp">eiko@sd.keio.ac.jp</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-02-26">
<day>26</day>
<month>02</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2026</year>
</pub-date>
<volume>17</volume>
<elocation-id>1733841</elocation-id>
<history>
<date date-type="received">
<day>28</day>
<month>10</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>20</day>
<month>01</month>
<year>2026</year>
</date>
<date date-type="accepted">
<day>02</day>
<month>02</month>
<year>2026</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x000A9; 2026 Matsuda and Matsuda.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Matsuda and Matsuda</copyright-holder>
<license>
<ali:license_ref start_date="2026-02-26">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>This study investigates links between synesthesia and lucid dreaming via perceptual presence and counterfactual-richness (abundant possible sensorimotor contingencies). We hypothesized that synesthetes would report more frequent lucid dreams because enhanced counterfactual-richness facilitates dream control and clarity. We surveyed 616 adults using a synesthesia self-report, the Lucidity and Consciousness in Dreams scale (LuCiD), and the Big-5 inventory (TIPI-J). Cluster analysis validated four synesthesia subtypes&#x02013;Language-Color, Ordinal Linguistic Personification (OLP), Spatial Sequence, and Visualized sensation&#x02013;consistent with prior work. Regression analyses revealed type-specific effects on lucid dreaming: perceptual synesthesia (Visualized sensation, Spatial Sequence) robustly promoted lucid-dream facets&#x02013;especially control, and also insight, dissociation, and positive emotion&#x02013;whereas conceptual synesthesia (Language-Color, OLP) showed negative interactions with Openness and Extraversion, thereby attenuating lucid-dream experiences. Personality analyses further confirmed positive associations between lucid dreaming and Openness and Extraversion, aligning with previous literature. We interpret perceptual synesthesia as an expression of excessive counterfactual-richness that enhances perceptual presence and sensorimotor contingencies during dreaming. These findings both clarify qualitative differences within synesthetic experience and suggest a new direction for understanding synesthesia and lucid dreaming as interconnected cognitive phenomena.</p></abstract>
<kwd-group>
<kwd>consciousness</kwd>
<kwd>counterfactuals</kwd>
<kwd>dreaming</kwd>
<kwd>lucid dream</kwd>
<kwd>perceptual presence</kwd>
<kwd>sensorimotor contingencies</kwd>
<kwd>synesthesia</kwd>
</kwd-group>
<funding-group>
<award-group id="gs1">
<funding-source id="sp1">
<institution-wrap>
<institution>Japan Society for the Promotion of Science</institution>
<institution-id institution-id-type="doi" vocab="open-funder-registry" vocab-identifier="10.13039/open_funder_registry">10.13039/501100001691</institution-id>
</institution-wrap>
</funding-source>
<award-id rid="sp1">24H00175</award-id>
<award-id rid="sp1">24K06485</award-id>
<award-id rid="sp1">24H01563</award-id>
<award-id rid="sp1">23K02969</award-id>
<award-id rid="sp1">21M05343</award-id>
</award-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. This work was supported by the JSPS Grant-in-Aid for Scientific Research Grant Numbers 24H00175, 24K06485, 24H01563, 23K02969, and 21M05343.</funding-statement>
</funding-group>
<counts>
<fig-count count="2"/>
<table-count count="4"/>
<equation-count count="0"/>
<ref-count count="53"/>
<page-count count="14"/>
<word-count count="9548"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Consciousness Research</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec sec-type="introduction" id="s1">
<label>1</label>
<title>Introduction</title>
<p>Synesthesia is a phenomenon wherein a sensory stimulus induces a different type of sensory perception. For example, viewing letters can evoke color sensations, or hearing sounds can induce perceptions of shapes, demonstrating diverse combinations of sensory modalities between the inducing stimulus (inducer) and the resulting perception (concurrent) (<xref ref-type="bibr" rid="B50">Ward, 2013</xref>). The primary characteristics of synesthesia include (1) stable correspondences maintained over long periods (temporal consistency), (2) idiosyncratic stimulus-perception combinations unique to individuals (e.g., one synesthete may perceive violin sounds as red, while another may perceive them as green; idiosyncrasy), (3) automatic and rapid occurrence of induced perceptions (automaticity), and (4) occurrence limited to a subset of the population (<xref ref-type="bibr" rid="B50">Ward, 2013</xref>; <xref ref-type="bibr" rid="B7">Deroy and Spence, 2013</xref>; <xref ref-type="bibr" rid="B5">Chiou et al., 2013</xref>). In particular, grapheme-color synesthesia is the most extensively studied, appearing in approximately 1% of the population (<xref ref-type="bibr" rid="B50">Ward, 2013</xref>). Additionally, grapheme-color synesthesia has been reported to exhibit relatively stable prevalence from childhood to adulthood (<xref ref-type="bibr" rid="B42">Simner et al., 2009</xref>; <xref ref-type="bibr" rid="B41">Simner and Bain, 2013</xref>; <xref ref-type="bibr" rid="B23">Meier et al., 2014</xref>), while qualitative changes such as reduced temporal consistency and diminished color vividness occur with age (<xref ref-type="bibr" rid="B23">Meier et al., 2014</xref>; <xref ref-type="bibr" rid="B45">Simner et al., 2017</xref>; <xref ref-type="bibr" rid="B29">Pfeifer et al., 2017</xref>).</p>
<p>One of the challenges in synesthesia research is its unclear relation to other general cognitive phenomena. As synesthesia is relatively rare, it is often understood as a special perceptual phenomenon, making it difficult to integrate within general cognitive models. Given that synesthesia involves interactions between multiple sensory modalities, it has been compared to crossmodal correspondences and sound symbolism (<xref ref-type="bibr" rid="B7">Deroy and Spence, 2013</xref>). Crossmodal correspondences refer to broadly shared, natural associations between senses, such as &#x0201C;higher pitches associated with brighter colors&#x0201D; (<xref ref-type="bibr" rid="B24">Mondloch and Maurer, 2004</xref>). Sound symbolism, exemplified by the &#x0201C;bouba/kiki effect,&#x0201D; refers to associations between linguistic sounds and meanings (<xref ref-type="bibr" rid="B19">K&#x000F6;hler, 1929</xref>; <xref ref-type="bibr" rid="B31">Ramachandran and Hubbard, 2001</xref>). These phenomena share crossmodal elements with synesthesia but differ notably by lacking the idiosyncrasy characteristic of synesthesia (<xref ref-type="bibr" rid="B7">Deroy and Spence, 2013</xref>).</p>
<p>In this study, we explore potential links between synesthesia and lucid dreaming. Lucid dreams refer to experiences in which individuals become aware that they are dreaming during the dream itself and may, in some cases, intentionally manipulate dream content (<xref ref-type="bibr" rid="B46">Stickgold et al., 2000</xref>; <xref ref-type="bibr" rid="B20">Laberge, 1985</xref>; <xref ref-type="bibr" rid="B48">Voss et al., 2009</xref>; <xref ref-type="bibr" rid="B3">Barrett, 2017</xref>). Synesthesia is clearly distinct from dreaming; nevertheless, both synesthesia and lucid dreams share the vivid perception of sensations or situations that are not actually present.</p>
<p>Here, we introduce the concept of <italic>perceptual presence</italic>, proposing that synesthesia involves a higher-than-normal perceptual presence. Perceptual presence refers to the sense that something exists in reality and is thought to be determined by counterfactual-richness (<xref ref-type="bibr" rid="B25">No&#x000EB;, 2004</xref>, <xref ref-type="bibr" rid="B26">2006</xref>; <xref ref-type="bibr" rid="B38">Seth, 2014</xref>). Counterfactual richness refers to how richly an experience contains <italic>potentialities</italic> of sensorimotor contingencies (SMC) &#x02013; that is, expectations of how sensation would change if one were to act in a certain way (<xref ref-type="bibr" rid="B25">No&#x000EB;, 2004</xref>, <xref ref-type="bibr" rid="B26">2006</xref>; <xref ref-type="bibr" rid="B38">Seth, 2014</xref>). For example, compare a tomato in front of you with a tomato depicted in a picture. Even if the two produced very similar retinal images, with the real tomato you can anticipate rich SMC potentials; such as &#x0201C;if I rotate my face, I will see its side or back,&#x0201D; &#x0201C;if I move closer, the shading will change,&#x0201D; and &#x0201C;if I pick it up, I will obtain tactile and olfactory information&#x0201D; (<italic>counterfactually-rich</italic>). By contrast, for the painted tomato, no matter how you change your viewpoint, SMCs such as &#x0201C;directly seeing the back of the painted tomato&#x0201D; do not, in principle, hold; thus, the richness of potential SMCs you can anticipate is relatively narrow (<italic>counterfactually-poor</italic>). The <italic>potentialities</italic> of SMCs refer to the fact that the action actually being performed is not required (counterfactual); rather, it concerns whether such potential SMCs are felt as available within experience (<xref ref-type="bibr" rid="B25">No&#x000EB;, 2004</xref>, <xref ref-type="bibr" rid="B26">2006</xref>; <xref ref-type="bibr" rid="B38">Seth, 2014</xref>). 
For instance, you may feel a potential SMC, in which you can grab and rotate the actual tomato, without actually doing the action. <xref ref-type="bibr" rid="B38">Seth (2014)</xref> argues that when these counterfactually-rich predictions are in place, perceptual presence is enhanced; perceptual content is experienced as &#x0201C;part of the real world&#x0201D; and as &#x0201C;continuous with the world&#x0201D; (<xref ref-type="bibr" rid="B25">No&#x000EB;, 2004</xref>, <xref ref-type="bibr" rid="B26">2006</xref>; <xref ref-type="bibr" rid="B38">Seth, 2014</xref>).</p>
<p>On this view, we can treat the concurrent sensations that arise in synesthesia as additional sensations layered on top of ordinary perception. If so, for the same stimulus, the richness of potential SMCs&#x02014;&#x0201C;if I were to move like this, my sensations should change like that&#x0201D;&#x02014;can increase, and the experience can involve higher <italic>counterfactual richness</italic>. Following <xref ref-type="bibr" rid="B38">Seth&#x00027;s (2014)</xref> framework, synesthesia may therefore strengthen perceptual presence insofar as it involves higher counterfactual richness.</p>
<p>In lucid dreams&#x02013;particularly those with high controllability&#x02013;the possibility of SMCs increases significantly, as dreamers can intentionally alter or control the dream content, enhancing counterfactual-richness. For instance, attempts like &#x0201C;moving/transforming objects,&#x0201D; &#x0201C;trying to fly&#x0201D; or &#x0201C;trying to switch scenes,&#x0201D; successively create new SMCs within the dream. Accordingly, in lucid dreams, particularly those with high controllability, the possibility of SMCs is expanded, rendering the experience more counterfactually rich. As a result, the degree to which the dream experience is felt as &#x0201C;really there&#x0201D;&#x02014;that is, its perceptual presence&#x02014;may be enhanced (<xref ref-type="bibr" rid="B38">Seth, 2014</xref>).</p>
<p>If synesthesia indeed results in excessive counterfactual-richness, synesthetes might similarly experience heightened counterfactual-richness within their dreams. Consequently, it can be hypothesized that synesthetes would experience controllable lucid dreams more frequently. In fact, previous research suggests that synesthetes have a higher incidence of lucid dreaming. According to <xref ref-type="bibr" rid="B18">Khallieva et al. (2022)</xref>, 80.6% of grapheme-color synesthetes reported experiencing lucid dreams, compared to 53.1% of a control group. However, this study has several limitations. First, it exclusively examined grapheme-color synesthesia despite the existence of multiple synesthesia types, leaving unexplored the relationship between other synesthetic forms and lucid dreaming. Additionally, it did not clearly outline the psychological and cognitive mechanisms underlying the facilitation of lucid dreaming by synesthesia.</p>
<p>Both synesthesia and lucid dreaming are known to be associated with personality traits. Synesthesia, in particular, shows a relatively strong correlation with the Big-5 personality trait of Openness, and some studies report a weak association with Neuroticism, although results for the latter have been inconsistent (<xref ref-type="bibr" rid="B35">Rouw and Scholte, 2016</xref>; <xref ref-type="bibr" rid="B39">Shi and Matsuda, 2024</xref>). Lucid dreaming has also been notably associated with Openness (<xref ref-type="bibr" rid="B15">Hess et al., 2017</xref>), suggesting that personality traits might moderate the relationship between synesthesia and lucid dreaming.</p>
<p>Based on these considerations, we examined the association between synesthesia subtypes and the frequency of lucid dreaming&#x02013;particularly controllable lucid dreaming&#x02013;while accounting for personality traits. Specifically, after entering Big-Five personality traits as covariates, we tested whether endorsement of the four major synesthesia types&#x02013;Language-color (colors accompanying letters/numbers and related linguistic stimuli; <xref ref-type="bibr" rid="B2">Baron-Cohen, 1996</xref>), Ordinal Linguistic Personification (OLP; personification attributes such as personality and/or gender assigned to numbers, weekdays, and other ordinal sequences; <xref ref-type="bibr" rid="B43">Simner and Holenstein, 2007</xref>), Spatial sequence (experiencing sequences such as numbers, times, height, temperature as having a continuous spatial layout; <xref ref-type="bibr" rid="B11">Eagleman, 2009</xref>), and Visualized sensation (a broad &#x0201C;sensation-to-vision&#x0201D; type in which sensory inputs such as sounds, taste, pain and emotions elicit visual concurrents such as colors and shapes; <xref ref-type="bibr" rid="B5">Chiou et al., 2013</xref>; <xref ref-type="bibr" rid="B51">Ward et al., 2006</xref>)&#x02014;was associated with a higher frequency of lucid dreams. In addition, we exploratorily evaluated whether personality traits (e.g., Openness) might amplify or attenuate these associations by testing interactions between synesthesia subtype indicators and personality traits.</p>
<p>We conducted a survey of 808 young adults using a synesthesia self-report questionnaire to assess synesthesia types, the Japanese version of the TIPI (TIPI-J; <xref ref-type="bibr" rid="B28">Oshio et al., 2012</xref>) to measure personality traits, and the LuCiD (the Lucidity and Consciousness in Dreams scale) to evaluate the frequency and quality of lucid dreams (<xref ref-type="bibr" rid="B52">Ward and Simner, 2022</xref>; <xref ref-type="bibr" rid="B49">Voss et al., 2013</xref>).</p>
<p>While the standard method for assessing synesthesia is the temporal consistency test (<xref ref-type="bibr" rid="B12">Eagleman et al., 2007</xref>), this study employed a self-report method due to the necessity of comprehensively evaluating multiple synesthesia types and examining their relationship with lucid dreaming in a large sample. Although self-report methods may yield a higher proportion of synesthetes compared to consistency tests, their validity has been supported by previous research (<xref ref-type="bibr" rid="B35">Rouw and Scholte, 2016</xref>; <xref ref-type="bibr" rid="B52">Ward and Simner, 2022</xref>).</p>
<p>The primary objective of this study is to understand synesthesia not as a unique perceptual phenomenon but in relation to more general cognitive phenomena, including lucid dreaming. To position synesthesia within a general cognitive framework, it is crucial to capture the diverse and dynamic experiences characteristic of synesthesia. Therefore, this study adopted the self-report method, which effectively captures a broader range of self-reported synesthesia-like experiences that are challenging to detect with temporal consistency tests, and examined these associations within a large sample. Through this approach, the study aims to demonstrate that synesthesia can be understood within the context of broader cognitive models, including lucid dreaming. Ultimately, this research proposes a new perspective, positioning synesthesia along a continuum of cognitive phenomena rather than viewing it as an isolated phenomenon.</p></sec>
<sec sec-type="materials|methods" id="s2">
<label>2</label>
<title>Materials and methods</title>
<sec>
<label>2.1</label>
<title>Participants</title>
<p>Participants in this study were 808 undergraduate students enrolled in a university psychology course on &#x0201C;Dreams and Sleep&#x0201D; within the general psychology curriculum. Of these, 616 students (241 males, 371 females; mean age = 19.44 years, SD = 1.295) provided complete responses to all questionnaires and were included in the analyses. This study was approved by the Ethics Committee of Toyo University (Approval number: P240021). All participants provided written informed consent in accordance with the Declaration of Helsinki.</p></sec>
<sec>
<label>2.2</label>
<title>Questionnaire</title>
<p>The following three questionnaires were utilized in this study:</p>
<list list-type="bullet">
<list-item><p>Synesthesia self-report questionnaire: this questionnaire was adapted from the synesthesia self-report survey developed by <xref ref-type="bibr" rid="B52">Ward and Simner (2022)</xref> to comprehensively and uniformly capture a wide range of synesthetic inducer&#x02013;concurrent pairings. Participants reported whether they experienced induced sensations (concurrents), such as color, shape, spatial arrangement, personality, gender, taste, or touch, in response to diverse inducing stimuli such as letters, numbers, sounds, pain, time, and sequences (see <xref ref-type="supplementary-material" rid="SM1">Supplementary Table S1</xref>).</p></list-item></list>
<p>Typically, synesthesia is identified using behavioral diagnostic approaches that assess the consistency of inducer&#x02013;concurrent mappings across time (e.g., consistency-based tests; <xref ref-type="bibr" rid="B12">Eagleman et al., 2007</xref>). However, this study used a self-report method to broadly assess both visual and non-visual synesthetic experiences, including spatial arrangement, personality, taste, and touch. Although self-report methods may yield higher estimates of synesthesia prevalence than methods based on temporal consistency (<xref ref-type="bibr" rid="B35">Rouw and Scholte, 2016</xref>; <xref ref-type="bibr" rid="B52">Ward and Simner, 2022</xref>) and may include individuals whose experiences are not temporally consistent, previous studies have demonstrated the validity of self-reports. For example, self-reported synesthetic profiles have been consistently correlated with objective behavioral indicators such as sensory hypersensitivity and imagery abilities (<xref ref-type="bibr" rid="B35">Rouw and Scholte, 2016</xref>). Furthermore, consistent cognitive profiles of synesthetes based on self-report data have also been established (<xref ref-type="bibr" rid="B52">Ward and Simner, 2022</xref>). Thus, while acknowledging its limitations, including the potential for inflated prevalence rates, the use of self-report methods is justified given the aims of this research.</p>
<p>Previous self-report studies have shown that synesthetic experiences cluster into multiple types, among which four types&#x02014;Language &#x02013; color, OLP, Spatial sequence, and Visualized sensation&#x02014;occur with particularly high frequency. Accordingly, following prior work, the present study focused on these four major types (<xref ref-type="bibr" rid="B52">Ward and Simner, 2022</xref>). Other less prevalent types were also assessed in the questionnaire (see <xref ref-type="supplementary-material" rid="SM1">Supplementary Table S1</xref>), but were excluded from the main inferential analyses due to low endorsement rates and limited statistical power.</p>
<list list-type="bullet">
<list-item><p>Japanese Version of the TIPI (TIPI-J): the TIPI-J is a concise and efficient scale for assessing personality traits based on the Big Five dimensions: Extraversion, Neuroticism (emotional instability), Openness, Conscientiousness, and Agreeableness. Each dimension is measured by two items (one positively worded and one negatively worded) using a 7-point Likert scale (total of 10 items) (<xref ref-type="bibr" rid="B28">Oshio et al., 2012</xref>). Although Openness and Neuroticism were of a priori interest based on prior work, all Big Five traits were included in the exploratory analyses.</p></list-item>
<list-item><p>LuCiD (the Lucidity and Consciousness in Dreams scale): Dream lucidity was assessed using the LuCiD (<xref ref-type="bibr" rid="B49">Voss et al., 2013</xref>). Participants were asked to briefly describe any lucid dream experiences during the past year and then completed 28 items covering eight subscales &#x02013; insight (recognition of dreaming), control (ability to manipulate dream content), thought (logical reasoning), realism (dream realism), memory (memory access within dreams), dissociation (sense of detachment in dreams), negative emotion, and positive emotion. Items were rated on a 6-point scale (1 = strongly disagree, 6 = strongly agree) and recoded to the original 0&#x02013;5 scoring prior to computing subscale and total scores (0 = strongly disagree, 5 = strongly agree; <xref ref-type="bibr" rid="B49">Voss et al., 2013</xref>). A Japanese version was created via back translation for use in this study. The objective was to examine in detail how specific synesthesia types and personality traits influence different aspects of lucid dreaming experiences.</p></list-item>
</list>
<p>Using the questionnaire survey described above, this study aims to comprehensively and multidimensionally analyze the relationship between synesthesia and lucid dreaming, thereby offering new insights into the task of situating synesthesia within a continuum of general cognitive phenomena.</p></sec>
<sec>
<label>2.3</label>
<title>Procedure</title>
<p>The survey was conducted during psychology classes focused on &#x0201C;Dreams and Sleep.&#x0201D; The questionnaires were distributed by the researchers responsible for the classes, and participants responded individually. The survey was conducted across three sessions: personality traits (TIPI-J) in the first, lucid dreaming experiences (LuCiD) in the second, and the synesthesia self-report questionnaire in the third. Each questionnaire session took approximately 10 min.</p></sec>
<sec>
<label>2.4</label>
<title>Statistical analysis</title>
<p>All analyses were conducted in R (version 4.3.2). Only participants with complete responses to all questionnaires were included in the analyses (<italic>n</italic> = 616; listwise deletion).</p>
<sec>
<label>2.4.1</label>
<title>Coding of synesthesia types</title>
<p>Based on the synesthesia self-report questionnaire (<xref ref-type="supplementary-material" rid="SM1">Supplementary Table S1</xref>), four major synesthesia types were operationalized as follows: Type 1 (Language&#x02013;color; Q1), Type 2 (OLP/Personification; Q6), Type 3 (Spatial sequence; Q5), and Type 4 (Visualized sensation; Q2&#x02013;Q3). Each type was coded as a binary indicator (0 = absent, 1 = present). A participant was coded as present for a given type if they answered &#x0201C;Yes&#x0201D; to the corresponding question and endorsed at least one relevant inducer&#x02013;concurrent item within that type (endorsement of all items was not required). Participants could endorse multiple types.</p></sec>
<sec>
<label>2.4.2</label>
<title>Reliability and correlational analyses</title>
<p>Internal consistency of the LuCiD total score and each subscale was evaluated using Cronbach&#x00027;s &#x003B1;. Pearson&#x00027;s correlation coefficients were computed between LuCiD scores (total and subscales) and Big Five personality traits (TIPI-J) to assess their associations.</p></sec>
<sec>
<label>2.4.3</label>
<title>Cluster analysis</title>
<p>Following prior work using the same self-report approach, synesthetic experiences can be grouped into multiple clusters, of which the present study focused on the four most prevalent types (Language&#x02013;color, OLP, Spatial sequence, and Visualized sensation; <xref ref-type="bibr" rid="B52">Ward and Simner, 2022</xref>). Therefore, hierarchical cluster analysis was conducted as a construct validity check to examine whether item-level response patterns reproduced the expected four-group structure. We used Euclidean distance and Ward&#x00027;s minimum-variance method, and set the number of clusters a priori to 4 (<italic>k</italic> &#x0003D; 4) based on our predefined focus on these four major types.</p></sec>
<sec>
<label>2.4.4</label>
<title>Regression analyses</title>
<p>Multiple linear regression analyses were conducted with the LuCiD total score and each LuCiD subscale score as dependent variables. Predictors included the four synesthesia-type indicators and the Big Five trait scores. Interaction terms between synesthesia types and personality traits were also considered to test moderation. All variables were standardized (<italic>z</italic>-scored) prior to regression analyses. Model selection was performed using an Akaike Information Criterion (AIC)&#x02013;based stepwise procedure (step function in the stats package), which balances model fit and complexity. Statistical significance was evaluated using two-tailed tests with &#x003B1; = 0.05. Because predictors were selected using an AIC-based stepwise procedure, we report coefficients for all predictors retained in the final models, regardless of whether they reach the conventional significance threshold.</p></sec></sec></sec>
<sec sec-type="results" id="s3">
<label>3</label>
<title>Results</title>
<sec>
<label>3.1</label>
<title>Synesthesia endorsement rate and cluster structure</title>
<p>Responses to the Synesthesia Self-Report Questionnaire were summarized for the four primary synesthesia types targeted in the present study (Type 1: Language&#x02013;color, Type 2: OLP, Type 3: Spatial sequence, Type 4: Visualized sensation).</p>
<p><xref ref-type="fig" rid="F1">Figure 1a</xref> shows the endorsement rate (self-report) of each synesthesia type and <xref ref-type="fig" rid="F1">Figure 1b</xref> the distribution of the number of synesthesia types endorsed by participants. A participant was considered to endorse a given synesthesia type if they responded &#x0201C;yes&#x0201D; to at least one item belonging to that type; participants could endorse multiple types. A value of zero in panel (<bold>b</bold>) indicates participants who did not endorse any of the four primary synesthesia types. Accordingly, the overall endorsement rate (68.0%) reflects the endorsement of at least one synesthesia-like experience.</p>
<fig position="float" id="F1">
<label>Figure 1</label>
<caption><p>Endorsement rate and distribution of synesthesia types based on the Synesthesia Self-Report Questionnaire. <bold>(a)</bold> Percentage of participants reporting at least one of the four primary synesthesia types (Type 1: Language&#x02013;color, Type 2: OLP, Type 3: Spatial sequence, Type 4: Visualized sensation). <bold>(b)</bold> Frequency distribution (%) of the number of synesthesia types simultaneously reported by participants (0 indicates no endorsement of any of the four primary types).</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpsyg-17-1733841-g0001.tif">
<alt-text content-type="machine-generated">Two bar graphs display synesthesia data. Graph a shows frequency percentages by four types of synesthesia, with roughly equal frequencies, highest at type one and four. Graph b shows frequency percentages by number of synesthesias, with frequency decreasing from zero to four.</alt-text>
</graphic>
</fig>
<p>To examine the structure of item-level synesthesia responses, hierarchical cluster analysis (Euclidean distance and Ward&#x00027;s minimum-variance method, <italic>k</italic> &#x0003D; 4) was conducted. The dendrogram and cluster assignment are shown in <xref ref-type="supplementary-material" rid="SM1">Supplementary Figure S1</xref>. The four-cluster solution yielded the following clusters: (i) Language&#x02013;color (e.g., letters&#x02013;color, numbers&#x02013;color), (ii) OLP (e.g., numbers&#x02013;personality, days&#x02013;personality), (iii) Spatial sequence (e.g., numbers&#x02013;spatial arrangement, months&#x02013;spatial arrangement), and (iv) Visualized sensation (e.g., music&#x02013;color, emotion&#x02013;color). In this solution, the item &#x0201C;Posture&#x02013;color&#x0201D; was assigned to the Language&#x02013;color cluster (<xref ref-type="supplementary-material" rid="SM1">Supplementary Figure S1</xref>).</p></sec>
<sec>
<label>3.2</label>
<title>LuCiD internal consistency and correlations with personality traits</title>
<p>Internal consistency of the LuCiD was evaluated using Cronbach&#x00027;s &#x003B1; coefficients (<xref ref-type="table" rid="T1">Table 1</xref>). Cronbach&#x00027;s &#x003B1; was 0.79 for the LuCiD total score. Subscale &#x003B1; values were 0.87 (Insight), 0.82 (Control), 0.70 (Thought), 0.66 (Realism), 0.62 (Memory), 0.69 (Dissociation), 0.86 (Positive Emotion), and 0.87 (Negative Emotion).</p>
<table-wrap position="float" id="T1">
<label>Table 1</label>
<caption><p>Internal consistency (Cronbach&#x00027;s &#x003B1;) and descriptive statistics for the LuCiD total score and subscales (<italic>N</italic> &#x0003D; 616).</p></caption>
<table frame="box" rules="all">
<thead>
<tr>
<th valign="top" align="left"><bold>Scale</bold></th>
<th valign="top" align="center"><bold><italic>k</italic></bold></th>
<th valign="top" align="center"><bold><italic>M</italic></bold></th>
<th valign="top" align="center"><bold><italic>SD</italic></bold></th>
<th valign="top" align="center"><bold>Range</bold></th>
<th valign="top" align="center"><bold>Cronbach&#x00027;s &#x003B1;</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">LuCiD total</td>
<td valign="top" align="center">28</td>
<td valign="top" align="center">1.63</td>
<td valign="top" align="center">0.84</td>
<td valign="top" align="center">0.00&#x02013;4.75</td>
<td valign="top" align="center">0.79</td>
</tr>
<tr>
<td valign="top" align="left">Insight</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">1.67</td>
<td valign="top" align="center">1.29</td>
<td valign="top" align="center">0.00&#x02013;5.00</td>
<td valign="top" align="center">0.87</td>
</tr>
<tr>
<td valign="top" align="left">Control</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">1.31</td>
<td valign="top" align="center">1.19</td>
<td valign="top" align="center">0.00&#x02013;5.00</td>
<td valign="top" align="center">0.82</td>
</tr>
<tr>
<td valign="top" align="left">Thought</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">1.74</td>
<td valign="top" align="center">1.27</td>
<td valign="top" align="center">0.00&#x02013;5.00</td>
<td valign="top" align="center">0.70</td>
</tr>
<tr>
<td valign="top" align="left">Realism</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">2.09</td>
<td valign="top" align="center">1.27</td>
<td valign="top" align="center">0.00&#x02013;5.00</td>
<td valign="top" align="center">0.66</td>
</tr>
<tr>
<td valign="top" align="left">Memory</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">1.40</td>
<td valign="top" align="center">1.02</td>
<td valign="top" align="center">0.00&#x02013;4.75</td>
<td valign="top" align="center">0.62</td>
</tr>
<tr>
<td valign="top" align="left">Dissociation</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">1.05</td>
<td valign="top" align="center">1.12</td>
<td valign="top" align="center">0.00&#x02013;5.00</td>
<td valign="top" align="center">0.69</td>
</tr>
<tr>
<td valign="top" align="left">Positive emotion</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">2.19</td>
<td valign="top" align="center">1.58</td>
<td valign="top" align="center">0.00&#x02013;5.00</td>
<td valign="top" align="center">0.86</td>
</tr>
<tr>
<td valign="top" align="left">Negative emotion</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">2.21</td>
<td valign="top" align="center">1.59</td>
<td valign="top" align="center">0.00&#x02013;5.00</td>
<td valign="top" align="center">0.87</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Subscale scores and the total LuCiD score were computed as mean item scores of their constituent items. <italic>k</italic> denotes the number of items in each subscale. <italic>M</italic> and <italic>SD</italic> are mean item scores on the 0&#x02013;5 scale. Items were rated on a 6-point scale and recoded to match the original LuCiD scoring (0 = strongly disagree, 5 = strongly agree).</p>
</table-wrap-foot>
</table-wrap>
<p><xref ref-type="table" rid="T1">Table 1</xref> also reports descriptive statistics for the LuCiD total score and each subscale (mean item scores on the 0&#x02013;5 scale). Because lucidity was operationalized as continuous LuCiD scores, participants were not dichotomized into &#x0201C;lucid&#x0201D; vs. &#x0201C;non-lucid&#x0201D; dreamers; accordingly, we do not report a single categorical count of lucid dreamers. Instead, the distribution of scores provides an overview of individual differences in lucid-dream features (e.g., Insight: <italic>M</italic> &#x0003D; 1.67, <italic>SD</italic> &#x0003D; 1.29, range = 0.00&#x02013;5.00; LuCiD total: <italic>M</italic> &#x0003D; 1.63, <italic>SD</italic> &#x0003D; 0.84, range = 0.00&#x02013;4.75).</p>
<p>Pearson correlation coefficients between LuCiD scores (total and subscales) and Big Five personality traits (TIPI-J) are shown in <xref ref-type="table" rid="T2">Table 2</xref>. The LuCiD total score was positively correlated with Extraversion (<italic>r</italic> &#x0003D; 0.15, <italic>p</italic> &#x0003C; 0.001) and Openness (<italic>r</italic> &#x0003D; 0.08, <italic>p</italic> &#x0003C; 0.05). Control and Thought were also positively correlated with Extraversion (Control: <italic>r</italic> &#x0003D; 0.13, <italic>p</italic> &#x0003C; 0.01; Thought: <italic>r</italic> &#x0003D; 0.11, <italic>p</italic> &#x0003C; 0.01) and Openness (Control: <italic>r</italic> &#x0003D; 0.09, <italic>p</italic> &#x0003C; 0.05; Thought: <italic>r</italic> &#x0003D; 0.11, <italic>p</italic> &#x0003C; 0.01).</p>
<table-wrap position="float" id="T2">
<label>Table 2</label>
<caption><p>Correlation coefficients and <italic>p</italic>-values between LuCiD scale (total and subscales) and Big-5 personality traits (TIPI-J).</p></caption>
<table frame="box" rules="all">
<thead>
<tr>
<th valign="top" align="left"><bold>LuCiD scale</bold></th>
<th valign="top" align="center"><bold>Extraversion</bold></th>
<th valign="top" align="center"><bold>Agreeableness</bold></th>
<th valign="top" align="center"><bold>Conscientiousness</bold></th>
<th valign="top" align="center"><bold>Neuroticism</bold></th>
<th valign="top" align="center"><bold>Openness</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Total score</td>
<td valign="top" align="center">0.15<sup>&#x0002A;&#x0002A;&#x0002A;</sup></td>
<td valign="top" align="center">&#x02212;0.00</td>
<td valign="top" align="center">0.05</td>
<td valign="top" align="center">0.04</td>
<td valign="top" align="center">0.08<sup>&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Insight</td>
<td valign="top" align="center">0.12<sup>&#x0002A;&#x0002A;</sup></td>
<td valign="top" align="center">0.03</td>
<td valign="top" align="center">0.08</td>
<td valign="top" align="center">0.06</td>
<td valign="top" align="center">0.06</td>
</tr>
<tr>
<td valign="top" align="left">Control</td>
<td valign="top" align="center">0.13<sup>&#x0002A;&#x0002A;</sup></td>
<td valign="top" align="center">&#x02212;0.04</td>
<td valign="top" align="center">0.07</td>
<td valign="top" align="center">0.03</td>
<td valign="top" align="center">0.09<sup>&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Thought</td>
<td valign="top" align="center">0.11<sup>&#x0002A;&#x0002A;</sup></td>
<td valign="top" align="center">0.05</td>
<td valign="top" align="center">0.02</td>
<td valign="top" align="center">0.05</td>
<td valign="top" align="center">0.11<sup>&#x0002A;&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Realism</td>
<td valign="top" align="center">0.05</td>
<td valign="top" align="center">0.04</td>
<td valign="top" align="center">&#x02212;0.00</td>
<td valign="top" align="center">0.06</td>
<td valign="top" align="center">0.02</td>
</tr>
<tr>
<td valign="top" align="left">Memory</td>
<td valign="top" align="center">0.16<sup>&#x0002A;&#x0002A;&#x0002A;</sup></td>
<td valign="top" align="center">&#x02212;0.04</td>
<td valign="top" align="center">0.03</td>
<td valign="top" align="center">0.03</td>
<td valign="top" align="center">0.09<sup>&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Dissociation</td>
<td valign="top" align="center">0.07</td>
<td valign="top" align="center">&#x02212;0.09<sup>&#x0002A;</sup></td>
<td valign="top" align="center">0.02</td>
<td valign="top" align="center">&#x02212;0.06</td>
<td valign="top" align="center">0.01</td>
</tr>
<tr>
<td valign="top" align="left">Negative Emotion</td>
<td valign="top" align="center">&#x02212;0.03</td>
<td valign="top" align="center">&#x02212;0.03</td>
<td valign="top" align="center">&#x02212;0.08<sup>&#x0002A;</sup></td>
<td valign="top" align="center">0.00</td>
<td valign="top" align="center">&#x02212;0.01</td>
</tr>
<tr>
<td valign="top" align="left">Positive Emotion</td>
<td valign="top" align="center">0.12<sup>&#x0002A;&#x0002A;</sup></td>
<td valign="top" align="center">0.01</td>
<td valign="top" align="center">0.04</td>
<td valign="top" align="center">&#x02212;0.01</td>
<td valign="top" align="center">&#x02212;0.01</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p><sup>&#x0002A;</sup><italic>p</italic> &#x0003C; 0.05, <sup>&#x0002A;&#x0002A;</sup><italic>p</italic> &#x0003C; 0.01, <sup>&#x0002A;&#x0002A;&#x0002A;</sup><italic>p</italic> &#x0003C; 0.001.</p>
</table-wrap-foot>
</table-wrap></sec>
<sec>
<label>3.3</label>
<title>Regression analyses predicting LuCiD outcomes</title>
<p>Multiple linear regression analyses were conducted for the LuCiD total score and each LuCiD subscale (<xref ref-type="table" rid="T3">Tables 3</xref>, <xref ref-type="table" rid="T4">4</xref>). Predictors retained in the final models were selected using an AIC-based stepwise procedure. Standardized coefficients are reported.</p>
<table-wrap position="float" id="T3">
<label>Table 3</label>
<caption><p>Results of multivariate regression analysis for LuCiD overall score.</p></caption>
<table frame="box" rules="all">
<thead>
<tr>
<th valign="top" align="left"><bold>Dependent variable</bold></th>
<th valign="top" align="center"><bold>Predictor</bold></th>
<th valign="top" align="center"><bold>Estimate</bold></th>
<th valign="top" align="center"><bold>Std. Error</bold></th>
<th valign="top" align="center"><bold><italic>t</italic>-value</bold></th>
<th valign="top" align="center"><bold><italic>p</italic>-value</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left" rowspan="8">LuCiD overall score</td>
<td valign="top" align="center">Type 1: Language&#x02013;color</td>
<td valign="top" align="center">&#x02212;0.038</td>
<td valign="top" align="center">0.040</td>
<td valign="top" align="center">&#x02212;0.94</td>
<td valign="top" align="center">0.347</td>
</tr>
 <tr>
<td valign="top" align="center">Type 2: OLP</td>
<td valign="top" align="center">&#x02212;0.016</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">&#x02212;0.39</td>
<td valign="top" align="center">0.700</td>
</tr>
 <tr>
<td valign="top" align="center">Type 3: Spatial sequence</td>
<td valign="top" align="center">0.104</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">2.55</td>
<td valign="top" align="center">0.011<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 4: Visualized sensation</td>
<td valign="top" align="center">0.236</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">5.75</td>
<td valign="top" align="center">&#x0003C;0.001<sup>&#x0002A;&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Extraversion (P1)</td>
<td valign="top" align="center">0.105</td>
<td valign="top" align="center">0.039</td>
<td valign="top" align="center">2.70</td>
<td valign="top" align="center">0.007<sup>&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Openness (P5)</td>
<td valign="top" align="center">0.053</td>
<td valign="top" align="center">0.038</td>
<td valign="top" align="center">1.37</td>
<td valign="top" align="center">0.171</td>
</tr>
 <tr>
<td valign="top" align="center">Type 1 &#x000D7; Extraversion</td>
<td valign="top" align="center">&#x02212;0.100</td>
<td valign="top" align="center">0.038</td>
<td valign="top" align="center">&#x02212;2.60</td>
<td valign="top" align="center">0.010<sup>&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 2 &#x000D7; Openness</td>
<td valign="top" align="center">&#x02212;0.079</td>
<td valign="top" align="center">0.039</td>
<td valign="top" align="center">&#x02212;2.01</td>
<td valign="top" align="center">0.044<sup>&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Adjusted <italic>R</italic><sup>2</sup></td>
<td valign="top" align="center" colspan="5">0.103</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Predictors were selected using an AIC-based stepwise procedure. <sup>&#x0002A;</sup><italic>p</italic> &#x0003C; 0.05, <sup>&#x0002A;&#x0002A;</sup><italic>p</italic> &#x0003C; 0.01, <sup>&#x0002A;&#x0002A;&#x0002A;</sup><italic>p</italic> &#x0003C; 0.001.</p>
</table-wrap-foot>
</table-wrap>
<table-wrap position="float" id="T4">
<label>Table 4</label>
<caption><p>Results of multiple regression analyses for LuCiD subscales.</p></caption>
<table frame="box" rules="all">
<thead>
<tr>
<th valign="top" align="left"><bold>Dependent variable</bold></th>
<th valign="top" align="center"><bold>Predictor</bold></th>
<th valign="top" align="center"><bold>Estimate</bold></th>
<th valign="top" align="center"><bold>Std. Error</bold></th>
<th valign="top" align="center"><bold><italic>t</italic>-value</bold></th>
<th valign="top" align="center"><bold><italic>p</italic>-value</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left" rowspan="4">Insight (d1)</td>
<td valign="top" align="center">Type 1 (Language&#x02013;color)</td>
<td valign="top" align="center">&#x02212;0.092</td>
<td valign="top" align="center">0.040</td>
<td valign="top" align="center">&#x02212;2.28</td>
<td valign="top" align="center">0.023<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 4 (Visualized sensation)</td>
<td valign="top" align="center">0.152</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">3.66</td>
<td valign="top" align="center">&#x0003C;0.001<sup>&#x0002A;&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Extraversion (P1)</td>
<td valign="top" align="center">0.082</td>
<td valign="top" align="center">0.040</td>
<td valign="top" align="center">2.06</td>
<td valign="top" align="center">0.040<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 1 &#x000D7; Extraversion</td>
<td valign="top" align="center">&#x02212;0.107</td>
<td valign="top" align="center">0.039</td>
<td valign="top" align="center">&#x02212;2.73</td>
<td valign="top" align="center">0.007<sup>&#x0002A;&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Adjusted <italic>R</italic><sup>2</sup></td>
<td valign="top" align="center" colspan="5">0.061</td>
</tr>
<tr>
<td valign="top" align="left" rowspan="5">Control (d2)</td>
<td valign="top" align="center">Type 1 (Language&#x02013;color)</td>
<td valign="top" align="center">&#x02212;0.089</td>
<td valign="top" align="center">0.040</td>
<td valign="top" align="center">&#x02212;2.19</td>
<td valign="top" align="center">0.029<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 3 (Spatial sequence)</td>
<td valign="top" align="center">0.103</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">2.48</td>
<td valign="top" align="center">0.013<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 4 (Visualized sensation)</td>
<td valign="top" align="center">0.197</td>
<td valign="top" align="center">0.042</td>
<td valign="top" align="center">4.74</td>
<td valign="top" align="center">&#x0003C;0.001<sup>&#x0002A;&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Extraversion (P1)</td>
<td valign="top" align="center">0.097</td>
<td valign="top" align="center">0.039</td>
<td valign="top" align="center">2.47</td>
<td valign="top" align="center">0.014<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 2 (OLP) &#x000D7; Extraversion</td>
<td valign="top" align="center">&#x02212;0.081</td>
<td valign="top" align="center">0.038</td>
<td valign="top" align="center">&#x02212;2.14</td>
<td valign="top" align="center">0.033<sup>&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Adjusted <italic>R</italic><sup>2</sup></td>
<td valign="top" align="center" colspan="5">0.076</td>
</tr>
<tr>
<td valign="top" align="left" rowspan="4">Thought (d3)</td>
<td valign="top" align="center">Type 4 (Visualized sensation)</td>
<td valign="top" align="center">0.185</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">4.49</td>
<td valign="top" align="center">&#x0003C;0.001<sup>&#x0002A;&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Openness (P5)</td>
<td valign="top" align="center">0.091</td>
<td valign="top" align="center">0.039</td>
<td valign="top" align="center">2.33</td>
<td valign="top" align="center">0.020<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 1 &#x000D7; Extraversion</td>
<td valign="top" align="center">&#x02212;0.107</td>
<td valign="top" align="center">0.040</td>
<td valign="top" align="center">&#x02212;2.63</td>
<td valign="top" align="center">0.009<sup>&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 4 &#x000D7; Extraversion</td>
<td valign="top" align="center">0.093</td>
<td valign="top" align="center">0.042</td>
<td valign="top" align="center">2.22</td>
<td valign="top" align="center">0.027<sup>&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Adjusted <italic>R</italic><sup>2</sup></td>
<td valign="top" align="center" colspan="5">0.073</td>
</tr>
<tr>
<td valign="top" align="left">Realism (d4)</td>
<td valign="top" align="center">Type 4 (Visualized sensation)</td>
<td valign="top" align="center">0.105</td>
<td valign="top" align="center">0.042</td>
<td valign="top" align="center">2.50</td>
<td valign="top" align="center">0.013<sup>&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Adjusted <italic>R</italic><sup>2</sup></td>
<td valign="top" align="center" colspan="5">0.024</td>
</tr>
<tr>
<td valign="top" align="left" rowspan="2">Memory (d5)</td>
<td valign="top" align="center">Type 4 (Visualized sensation)</td>
<td valign="top" align="center">0.180</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">4.37</td>
<td valign="top" align="center">&#x0003C;0.001<sup>&#x0002A;&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Extraversion (P1)</td>
<td valign="top" align="center">0.123</td>
<td valign="top" align="center">0.039</td>
<td valign="top" align="center">3.14</td>
<td valign="top" align="center">0.002<sup>&#x0002A;&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Adjusted <italic>R</italic><sup>2</sup></td>
<td valign="top" align="center" colspan="5">0.071</td>
</tr>
<tr>
<td valign="top" align="left" rowspan="4">Dissociation (d6)</td>
<td valign="top" align="center">Type 3 (Spatial sequence)</td>
<td valign="top" align="center">0.093</td>
<td valign="top" align="center">0.042</td>
<td valign="top" align="center">2.24</td>
<td valign="top" align="center">0.026<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 4 (Visualized sensation)</td>
<td valign="top" align="center">0.178</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">4.31</td>
<td valign="top" align="center">&#x0003C;0.001<sup>&#x0002A;&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Agreeableness (P2)</td>
<td valign="top" align="center">&#x02212;0.089</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">&#x02212;2.19</td>
<td valign="top" align="center">0.029<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 2 (OLP) &#x000D7; Openness (P5)</td>
<td valign="top" align="center">&#x02212;0.119</td>
<td valign="top" align="center">0.040</td>
<td valign="top" align="center">&#x02212;2.95</td>
<td valign="top" align="center">0.003<sup>&#x0002A;&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Adjusted <italic>R</italic><sup>2</sup></td>
<td valign="top" align="center" colspan="5">0.067</td>
</tr>
<tr>
<td valign="top" align="left" rowspan="3">Negative emotion (d7)</td>
<td valign="top" align="center">Type 1 (Language&#x02013;color)</td>
<td valign="top" align="center">0.097</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">2.33</td>
<td valign="top" align="center">0.020<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 4 &#x000D7; Extraversion</td>
<td valign="top" align="center">0.089</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">2.18</td>
<td valign="top" align="center">0.030<sup>&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 4 &#x000D7; Agreeableness</td>
<td valign="top" align="center">&#x02212;0.084</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">&#x02212;2.06</td>
<td valign="top" align="center">0.040<sup>&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Adjusted <italic>R</italic><sup>2</sup></td>
<td valign="top" align="center" colspan="5">0.030</td>
</tr>
<tr>
<td valign="top" align="left" rowspan="3">Positive emotion (d8)</td>
<td valign="top" align="center">Type 3 (Spatial sequence)</td>
<td valign="top" align="center">0.114</td>
<td valign="top" align="center">0.041</td>
<td valign="top" align="center">2.79</td>
<td valign="top" align="center">0.006<sup>&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Type 4 (Visualized sensation)</td>
<td valign="top" align="center">0.146</td>
<td valign="top" align="center">0.042</td>
<td valign="top" align="center">3.50</td>
<td valign="top" align="center">&#x0003C;0.001<sup>&#x0002A;&#x0002A;&#x0002A;</sup></td>
</tr>
 <tr>
<td valign="top" align="center">Extraversion (P1)</td>
<td valign="top" align="center">0.093</td>
<td valign="top" align="center">0.040</td>
<td valign="top" align="center">2.30</td>
<td valign="top" align="center">0.022<sup>&#x0002A;</sup></td>
</tr>
<tr>
<td valign="top" align="left">Adjusted <italic>R</italic><sup>2</sup></td>
<td valign="top" align="center" colspan="5">0.055</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Each LuCiD subscale served as a dependent variable. Predictors were selected using an AIC-based stepwise procedure. This table reports only statistically significant standardized coefficients (<italic>p</italic> &#x0003C; 0.05) from the final models; full coefficients for all predictors retained in the final models (including non-significant terms) are provided in <xref ref-type="supplementary-material" rid="SM1">Supplementary Tables S2</xref>, <xref ref-type="supplementary-material" rid="SM1">S3</xref>.</p>
<p><sup>&#x0002A;</sup><italic>p</italic> &#x0003C; 0.05, <sup>&#x0002A;&#x0002A;</sup><italic>p</italic> &#x0003C; 0.01, <sup>&#x0002A;&#x0002A;&#x0002A;</sup><italic>p</italic> &#x0003C; 0.001.</p>
</table-wrap-foot>
</table-wrap>
<p><xref ref-type="table" rid="T3">Table 3</xref> summarizes the final model for the LuCiD total score (adjusted <italic>R</italic><sup>2</sup> &#x0003D; 0.103). Positive coefficients were observed for Type 3 (Spatial sequence), Type 4 (Visualized sensation), and Extraversion, whereas negative coefficients were observed for the interaction terms Type 1 &#x000D7; Extraversion and Type 2 &#x000D7; Openness.</p>
<p>For the eight LuCiD subscales, <xref ref-type="table" rid="T4">Table 4</xref> reports only statistically significant standardized coefficients (<italic>p</italic> &#x0003C; 0.05) from the AIC-selected final models (adjusted <italic>R</italic><sup>2</sup> range: 0.024&#x02013;0.076). Full coefficients for all predictors retained in the AIC-selected models (including non-significant terms) are provided in <xref ref-type="supplementary-material" rid="SM1">Supplementary Tables S2</xref>, <xref ref-type="supplementary-material" rid="SM1">S3</xref>. Across subscales, Type 4 was retained with positive coefficients in each model, with significant positive coefficients for most of the subscales, including Insight, Control, Thought, Realism, Memory, Dissociation, and Positive Emotion. Type 3 was retained in the final models for all outcomes except Negative Emotion and showed consistently positive coefficients, reaching conventional significance in the models for Control, Dissociation, and Positive Emotion. Type 1 was retained in multiple subscale models, showing negative coefficients for Insight and Control and a positive coefficient for Negative Emotion.</p>
<p>Personality traits contributed both as main effects and as moderators. Extraversion showed positive main effects on several outcomes, including Insight, Control, Memory, and Positive Emotion. Openness showed a positive main effect specifically for Thought (logical reasoning within dreams), whereas Agreeableness showed a negative association with Dissociation.</p>
<p>In addition, several interaction terms indicated moderation by personality traits: Type 1 &#x000D7; Extraversion was retained with negative coefficients for Insight and Thought; Type 2 &#x000D7; Extraversion was retained with a negative coefficient for Control; and Type 2 &#x000D7; Openness was retained with a negative coefficient for Dissociation. Type 4 also showed interactions with personality traits for a specific affective outcome (Type 4 &#x000D7; Extraversion and Type 4 &#x000D7; Agreeableness, both for Negative Emotion).</p>
<p><xref ref-type="fig" rid="F2">Figure 2</xref> provides a schematic overview of the direction of standardized coefficients for the final models. Because final models were selected using an AIC-based stepwise procedure, some predictors were retained despite not reaching <italic>p</italic> &#x0003C; 0.05; these coefficients are shown as dashed lines in <xref ref-type="fig" rid="F2">Figure 2</xref> and reported in full in <xref ref-type="supplementary-material" rid="SM1">Supplementary Tables S2</xref>, <xref ref-type="supplementary-material" rid="SM1">S3</xref>.</p>
<fig position="float" id="F2">
<label>Figure 2</label>
<caption><p>Conceptual diagram summarizing the standardized coefficients from the AIC-selected final regression models for the LuCiD total score and each LuCiD subscale (d1&#x02013;d8). Red lines indicate positive coefficients and blue lines indicate negative coefficients. Solid lines indicate statistically significant coefficients (<italic>p</italic> &#x0003C; 0.05), whereas dashed lines indicate non-significant coefficients (<italic>p</italic> &#x02265; 0.05). Adjusted <italic>R</italic><sup>2</sup> for each final model is shown at the bottom right.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpsyg-17-1733841-g0002.tif">
<alt-text content-type="machine-generated">The diagram shows eight structural models labeled Total and D1&#x02013;D8 (D1: Insight, D2: Control, D3: Thought, D4: Realism, D5: Memory, D6: Dissociation, D7: Neg. emotion, and D8: Pos. emotion). Each panel illustrates relationships between personality traits and synesthesia subtype variables and their effects on the corresponding LuCiD dimension. Directed arrows are color-coded to represent paths, and values are shown for the paths and the adjusted R-squared for each model.</alt-text>
</graphic>
</fig>
</sec></sec>
<sec sec-type="discussion" id="s4">
<label>4</label>
<title>Discussion</title>
<p>This study aimed to reconsider synesthesia, traditionally treated as a special perceptual phenomenon, within the context of broader cognitive phenomena. Building on the framework of perceptual presence and counterfactual-richness (<xref ref-type="bibr" rid="B25">No&#x000EB;, 2004</xref>, <xref ref-type="bibr" rid="B26">2006</xref>; <xref ref-type="bibr" rid="B38">Seth, 2014</xref>), we proposed that self-reported synesthetic experiences may involve excessive counterfactual-richness and that this property could extend to dreaming, potentially relating to lucid dreaming experiences&#x02014;particularly to the ability to intentionally manipulate dream content (Control). Prior work suggests that synesthetes may report lucid dreaming more frequently (<xref ref-type="bibr" rid="B18">Khallieva et al., 2022</xref>); however, previous evidence has primarily focused on grapheme&#x02013;color synesthesia and has not clarified mechanisms across synesthesia types. Therefore, we examined type-specific associations between synesthesia and lucid dreaming and tested whether personality traits modulate these associations (<xref ref-type="bibr" rid="B35">Rouw and Scholte, 2016</xref>; <xref ref-type="bibr" rid="B15">Hess et al., 2017</xref>).</p>
<p>We conducted a questionnaire survey in Japanese undergraduate students (<italic>n</italic> &#x0003D; 616) using a comprehensive synesthesia self-report questionnaire (<xref ref-type="bibr" rid="B52">Ward and Simner, 2022</xref>), the LuCiD scale for lucid dreaming experiences (<xref ref-type="bibr" rid="B49">Voss et al., 2013</xref>), and the Japanese version of the TIPI for Big Five personality traits (<xref ref-type="bibr" rid="B28">Oshio et al., 2012</xref>). Personality traits were included as covariates and potential moderators through interaction terms, consistent with the moderation hypothesis described in the Introduction.</p>
<p>The results indicated clear heterogeneity across synesthesia types. In the regression models, Type 4 (Visualized sensation) and Type 3 (Spatial sequence) showed positive coefficients for the LuCiD total score and appeared as positive predictors across multiple LuCiD subscales (including Control), whereas Type 1 (Language&#x02013;color) and Type 2 (OLP) showed weaker or negative effects in specific models and were most evident through interactions with personality traits (<xref ref-type="table" rid="T3">Tables 3</xref>, <xref ref-type="table" rid="T4">4</xref>, <xref ref-type="fig" rid="F2">Figure 2</xref>).</p>
<p>In the following sections, we interpret this type-specific pattern using the conceptual-perceptual account and then consider how personality traits may modulate these associations.</p>
<sec>
<label>4.1</label>
<title>Classification of synesthesia: conceptual or perceptual</title>
<p>The results of our regression models suggest that the direction of associations with lucid-dreaming indices may differ across synesthesia subtypes. In particular, Type 3 (Spatial sequence) and Type 4 (Visualized sensation) showed broadly positive associations, whereas Type 1 (Language&#x02013;color) and Type 2 (OLP) showed weaker associations, or even negative associations in some models. This pattern points to heterogeneity that cannot be fully explained by treating synesthesia as a single, unitary phenomenon. Accordingly, as a working hypothesis for interpreting our findings, we introduce a framework that organizes synesthesia types along a conceptual-perceptual dimension.</p>
<p>Here, &#x0201C;conceptual&#x0201D; refers to processing in which stimuli are identified by category or concepts (e.g., phoneme of a grapheme &#x0201C;a&#x0201D;) rather than by lower-level physical features (e.g., font or size of a grapheme &#x0201C;a&#x0201D;). In contrast, &#x0201C;perceptual&#x0201D; refers to processing closely tied to a stimulus&#x00027;s sensory characteristics and that can change dynamically in response to context.</p>
<p>Type 1 (Language&#x02013;color) and Type 2 (OLP) are often induced by symbolic stimuli such as letters, numbers, and days of the week, and previous studies have reported that the induced experiences are largely unaffected by perceptual features such as font or size. Therefore, these types can be regarded as &#x0201C;conceptual synesthesia,&#x0201D; driven primarily by conceptual processing (<xref ref-type="bibr" rid="B40">Simner, 2012</xref>; <xref ref-type="bibr" rid="B8">Dixon et al., 2004</xref>; <xref ref-type="bibr" rid="B34">Rich et al., 2005</xref>; <xref ref-type="bibr" rid="B53">Witthoft and Winawer, 2006</xref>; <xref ref-type="bibr" rid="B32">Ramachandran and Hubbard, 2003</xref>; <xref ref-type="bibr" rid="B4">Chiou and Rich, 2014</xref>). For example, the tendency to select a common color for the Arabic numeral &#x0201C;1&#x0201D; and the Japanese numeral &#x0201C;&#x04E00;&#x0201D; suggests that the determining factor may be the concept they represent rather than their visual form <italic>per se</italic> (<xref ref-type="bibr" rid="B1">Asano and Yokosawa, 2012</xref>).</p>
<p>By contrast, Type 4 (Visualized sensation) implies that perceptual inputs such as music, emotion, pain, and odors are accompanied by visual experiences (e.g., color, brightness, and shape). For example, studies of sound&#x02013;color synesthesia have reported that the magnitude or brightness of synesthetic experiences varies continuously in response to continuously changing stimulus frequencies (<xref ref-type="bibr" rid="B5">Chiou et al., 2013</xref>; <xref ref-type="bibr" rid="B51">Ward et al., 2006</xref>). This suggests the involvement of processing tightly coupled to the stimulus&#x00027;s sensory properties. In this sense, Type 4 can be regarded as a prototypical form of more &#x0201C;perceptual&#x0201D; synesthesia.</p>
<p>Type 3 (Spatial sequence) may appear conceptual insofar as it is triggered by symbolic elements such as numbers, time, height, and temperature. However, because the experiential content involves a sequence being evoked as a continuous spatial layout, it has been suggested that the inducer may be represented not as a discrete concept but rather as a continuous magnitude, and may thus function as a stimulus with perceptual qualities (<xref ref-type="bibr" rid="B30">Price, 2014</xref>; <xref ref-type="bibr" rid="B13">Gould et al., 2014</xref>).</p>
<p>Nevertheless, the conceptual/perceptual positioning is not fixed. For instance, for children before learning letters, letters may be processed more as mere shapes, and thus more perceptually. Furthermore, even in sound&#x02013;color synesthesia, some cases involve discrete pitch-class concepts acting as synesthetic inducers (e.g., C as red, D as yellow, E as green), rather than continuous acoustic properties. In such instances, the synesthetic association is likely to have a stronger conceptual component (<xref ref-type="bibr" rid="B17">Itoh et al., 2019</xref>). The conceptual/perceptual organization used in this study is therefore a pragmatic framework that presupposes such continuity. In the next subsection, we explore how this distinction between &#x0201C;more conceptual&#x0201D; and &#x0201C;more perceptual&#x0201D; synesthesia might relate to dream experiences&#x02014;particularly controllable lucid dreams&#x02014;from the perspectives of SMCs and counterfactual richness.</p></sec>
<sec>
<label>4.2</label>
<title>Impact of synesthesia types (conceptual/perceptual classification) on dream experiences</title>
<p>Based on the conceptual&#x02013;perceptual distinction introduced in the previous section, we interpret the associations between synesthesia types and lucid-dream outcomes (LuCiD) (<xref ref-type="table" rid="T3">Tables 3</xref>, <xref ref-type="table" rid="T4">4</xref>, <xref ref-type="fig" rid="F2">Figure 2</xref>).</p>
<p>Type 4 (Visualized sensation), which is characterized by perceptual features, showed a positive association with the LuCiD total score and yielded consistently positive coefficients across a wide range of subscales, including Insight, Control, Thought, Realism, Memory, Dissociation, and Positive Emotion. One possible interpretation is that Type 4 involves dynamically generated visual concurrents (e.g., color, shape, brightness) in response to continuous sensory inputs such as sounds, odors, and pain&#x02014;that is, the repertoire of possible SMCs is continuously and dynamically generated&#x02014;thereby providing excessive counterfactual-richness (<xref ref-type="bibr" rid="B27">O&#x00027;Regan and No&#x000EB;, 2001</xref>; <xref ref-type="bibr" rid="B16">Ikegami and Zlatev, 2008</xref>; <xref ref-type="bibr" rid="B38">Seth, 2014</xref>).</p>
<p>Following the perceptual-presence account outlined in the Introduction, the excessive counterfactual richness of Type 4 thereby enhances perceptual presence in dreams&#x02014;that is, the sense that the dream world is &#x0201C;really there.&#x0201D; A heightened perceptual presence may, in turn, provide a more coherent and action-relevant world model during dreaming, facilitating metacognitive monitoring of the ongoing experience (e.g., noticing inconsistencies and reflecting on one&#x00027;s state) and intentional intervention. This interpretation offers a plausible bridge from SMC/counterfactual-richness to LuCiD facets such as Insight, Dissociation, and Control, and may also be consistent with more elaborate in-dream cognition and recall (e.g., Thought, Memory) and a more realistic phenomenology (Realism). We emphasize that these links remain theoretical and should be tested more directly in future work using measures that capture SMC/counterfactual structure and perceptual presence during dreaming.</p>
<p>Similarly, Type 3 (Spatial sequence) also showed positive associations with the LuCiD total score and, in particular, with Control, Dissociation, and Positive Emotion. Because Type 3 is experienced as a sequential spatial layout that emerges as a continuous configuration, it may more readily increase potential SMCs related to spatial aspects of dreaming, such as scene construction and self-localization.</p>
<p>In contrast, for the more conceptually dominant types&#x02013;Type 1 (Language&#x02013;color) and Type 2 (OLP)&#x02013;associations with LuCiD outcomes were generally limited. For Type 1, negative coefficients were observed for Insight and Control, while a positive coefficient was observed for Negative Emotion. Moreover, the retention of interaction terms such as Type 1 &#x000D7; Extraversion, Type 2 &#x000D7; Extraversion, and Type 2 &#x000D7; Openness in the final models suggests that the influence of conceptual synesthesia may not be uniform but may instead be conditional on personality traits and/or cognitive style. Conceptual synesthesia relies on conceptual processing that can become highly automatized through learning (<xref ref-type="bibr" rid="B47">Stroop, 1935</xref>; <xref ref-type="bibr" rid="B21">MacLeod, 1991</xref>), and thus does not necessarily involve the sequential, continuous updating of SMCs assumed for perceptually grounded synesthesia. Within the present framework (SMC/counterfactual-richness), the absence of a consistent &#x0201C;enhancement&#x0201D; pattern for Type 1/2 across LuCiD facets can therefore be regarded as compatible with their concept-dominant and automatic-processing characteristics.</p>
<p>In summary, organizing synesthesia types along a perceptual (Visualized sensation, Spatial sequence) vs. conceptual (Language&#x02013;color, OLP) dimension is useful for understanding their contributions to lucid-dream outcomes. At least for the perceptually grounded types (Type 3/4), the observed pattern was consistent with our hypothesis that increased counterfactual-richness may facilitate dream controllability and metacognitive facets of lucidity. This conceptual reclassification may also offer implications for the cognitive and phenomenological understanding of synesthesia. Future studies should incorporate indices that more directly capture SMC/counterfactual-richness and examine their influence on dreaming and other everyday experiences.</p></sec>
<sec>
<label>4.3</label>
<title>Role of personality traits in the synesthesia&#x02013;lucid dream association</title>
<p>The regression models suggested that personality traits can contribute to LuCiD outcomes both as direct effects (main effects) and as moderating effects via interactions with synesthesia types (<xref ref-type="table" rid="T3">Tables 3</xref>, <xref ref-type="table" rid="T4">4</xref>, <xref ref-type="fig" rid="F2">Figure 2</xref>). Below, we focus on Extraversion, Openness, and Agreeableness and interpret their main effects and interaction patterns for the LuCiD subscales. Bivariate correlations (<xref ref-type="table" rid="T2">Table 2</xref>) are referenced only as supplementary information.</p>
<sec>
<label>4.3.1</label>
<title>Extraversion</title>
<p>Extraversion was retained with a positive coefficient for the LuCiD total score and also showed positive main effects for the subscales Insight, Control, Memory, and Positive Emotion (<xref ref-type="table" rid="T3">Tables 3</xref>, <xref ref-type="table" rid="T4">4</xref>). Associations between Extraversion and lucid dreaming (and related dream indices) have been reported in prior work (<xref ref-type="bibr" rid="B15">Hess et al., 2017</xref>; <xref ref-type="bibr" rid="B37">Schredl et al., 2016</xref>), and the present results are broadly consistent with this literature.</p>
<p>Individuals higher in Extraversion tend to allocate more attention to positively valenced emotional information and show a positivity bias in memory (<xref ref-type="bibr" rid="B14">Haas and Canli, 2008</xref>; <xref ref-type="bibr" rid="B9">Dolcos et al., 2011</xref>). Autobiographical memory research further suggests that extraversion predicts recalling more positive personal events and may contribute to maintaining a positive mood after such recall (<xref ref-type="bibr" rid="B6">Denkova et al., 2012</xref>; <xref ref-type="bibr" rid="B9">Dolcos et al., 2011</xref>). From this perspective, higher Extraversion may be reflected in easier recall of dream experiences (Memory) and more positive evaluations of dream experiences (Positive Emotion). In addition, the active engagement that often accompanies Extraversion may contribute to self-reports of metacognitive awareness (Insight) and intentional dream manipulation (Control).</p>
<p>Importantly, however, the effect of Extraversion was moderated by synesthesia type. For the conceptually oriented Type 1 (Language&#x02013;color), Type 1 &#x000D7; Extraversion was retained as a negative interaction for the LuCiD total score, and similar negative interactions were observed for Insight and Thought. For Type 2 (OLP), Type 2 &#x000D7; Extraversion was retained as a negative interaction for Control (<xref ref-type="table" rid="T4">Table 4</xref>). These patterns suggest that the positive contribution of Extraversion may be attenuated among individuals with conceptual synesthesia. Because the present study did not directly measure underlying processes, future research should test this possibility using more proximal indicators such as dream metacognition, attentional control, and imagery-manipulation strategies.</p>
<p>By contrast, for the perceptually oriented Type 4 (Visualized sensation), Type 4 &#x000D7; Extraversion was retained as a positive interaction for Thought and Negative Emotion (<xref ref-type="table" rid="T4">Table 4</xref>). Given that Extraversion showed no strong bivariate association with Negative Emotion (<xref ref-type="table" rid="T2">Table 2</xref>), this finding should not be interpreted as evidence that Extraversion generally increases negative dream affect. Rather, it may indicate that, specifically among individuals with richly perceptual synesthetic experiences, higher Extraversion is associated with increased dream-cognitive activity (Thought) and with a broader range of emotional arousal that can also influence reports of Negative Emotion.</p></sec>
<sec>
<label>4.3.2</label>
<title>Openness</title>
<p>Openness is a trait that encompasses intellectual curiosity for novelty and complexity, imaginative engagement, and introspective attitudes, and prior studies have reported positive associations between Openness and lucid dreaming (<xref ref-type="bibr" rid="B37">Schredl et al., 2016</xref>; <xref ref-type="bibr" rid="B15">Hess et al., 2017</xref>). In the present study, Openness also showed small positive bivariate correlations with the LuCiD total score and with the Control and Thought subscales (<xref ref-type="table" rid="T2">Table 2</xref>). However, in the regression model including other personality traits and synesthesia types, the main effect of Openness did not remain significant for the LuCiD total score (<xref ref-type="table" rid="T3">Table 3</xref>), suggesting that its contribution may be shared with other predictors and may be relatively small as an independent effect.</p>
<p>Notably, Openness showed a positive main effect on the Thought subscale (<xref ref-type="table" rid="T4">Table 4</xref>). This may indicate that individuals high in Openness are more likely to report vivid, active cognitive activity in dreams. This interpretation is consistent with the idea that Openness-related imaginative engagement and associative richness are reflected in subjective evaluations of dream cognition (Thought). Relatedly, Openness has been reported to be positively associated with the subjective experience of autobiographical memory (e.g., vividness) and with the use of memory (<xref ref-type="bibr" rid="B33">Rasmussen and Berntsen, 2010</xref>), which is also compatible with an &#x0201C;access to internal experience&#x0201D; account.</p>
<p>Furthermore, because Type 2 (OLP) &#x000D7; Openness was retained as a negative coefficient for the LuCiD total score (and for Dissociation) (<xref ref-type="table" rid="T3">Tables 3</xref>, <xref ref-type="table" rid="T4">4</xref>), Openness may be better conceptualized not as a uniform &#x0201C;enhancer&#x0201D; of lucid dreaming but as a moderating factor whose association with lucid-dream outcomes can be conditional on synesthesia type&#x02014;particularly for conceptually oriented synesthesia.</p></sec>
<sec>
<label>4.3.3</label>
<title>Agreeableness</title>
<p>In the present study, Agreeableness showed a negative main effect on Dissociation (i.e., the separation between self and dream content). Higher Agreeableness may be associated with stronger empathic attunement and involvement in dream events and emotions, resulting in lower reported Dissociation. In addition, for Negative Emotion, Type 4 (Visualized sensation) &#x000D7; Agreeableness was retained as a negative coefficient. From the perspective that Type 4 is counterfactually-rich, this interaction may suggest that even under conditions where perceptually rich experiences are more likely, Agreeableness (e.g., interpersonal harmony, empathy, and conflict-avoidance tendencies) may buffer emotional appraisal and reduce reports of negative dream affect.</p></sec>
<sec>
<label>4.3.4</label>
<title>Summary</title>
<p>The explanatory power of the regression models was modest (adjusted <italic>R</italic><sup>2</sup> &#x02248; 0.10 for the total score and approximately 0.02&#x02013;0.08 across subscales). Given that lucid-dream experiences are likely determined by multiple factors, synesthesia type and the Big Five may capture only a small portion of the variance. Moreover, because the present study is based on self-report data, recall and evaluative biases cannot be disentangled. Future research should employ prospective and repeated measurements (e.g., dream diaries) and explicitly incorporate mechanistically proximal variables (e.g., imagery vividness, absorption, dream-recall frequency, and metacognitive monitoring) to test reproducibility and to refine explanatory models.</p></sec></sec>
<sec>
<label>4.4</label>
<title>Continuity hypothesis between synesthetic and dream experiences</title>
<p>The findings of the current study suggest the possibility of a continuous influence of synesthetic perceptual experiences extending into dream experiences, particularly lucid dreaming. This interpretation aligns with the Continuity Hypothesis (<xref ref-type="bibr" rid="B36">Schredl, 2003</xref>; <xref ref-type="bibr" rid="B10">Domhoff, 2017</xref>), which proposes that dream content is continuous with waking cognitive processing and everyday perceptual experiences. According to the Continuity Hypothesis, dreams reflect an extension of cognitive processes and sensory experiences that frequently occur in daily life.</p>
<p>Synesthetes engage in specific perceptual processes in response to particular stimuli (e.g., letters, numbers, sounds) during their waking lives. In particular, perceptual synesthetes (Visualized sensation, Spatial sequence) may inherently provide excessive sensorimotor contingencies (SMCs) to sensory stimuli. Such a perceptual processing style likely continues into dream experiences, potentially facilitating lucid dream experiences characterized by enhanced dream content control (due to excessive SMCs) and heightened perceptual presence. Consequently, these experiences may lead to increased insight (awareness of dreaming) and dissociation (the separation of self from dream content).</p>
<p>Conceptualizing synesthetic sensory-perceptual processing in daily life as potentially continuous with dream experiences may provide a more comprehensive and phenomenological account of the relationship between synesthesia and lucid dreaming. Future research should combine quantitative assessments of synesthesia (e.g., temporal-consistency measures indexing temporal stability) with phenomenological analyses of dream reports to clarify how everyday synesthetic experiences are reflected in dream content and dream awareness. Because the present study is based on self-report data, the mechanisms proposed here should be regarded as conceptual and interpreted with appropriate caution.</p></sec>
<sec>
<label>4.5</label>
<title>Methodological limitations and scope of interpretation</title>
<p>As anticipated from our a priori decision to use a broad self-report questionnaire (<xref ref-type="bibr" rid="B35">Rouw and Scholte, 2016</xref>), we observed a high endorsement rate of synesthetic experiences in this sample, which should not be interpreted as a prevalence estimate for the general population. Participants were recruited from a university course focused on &#x0201C;dreams and sleep,&#x0201D; a context that may have attracted students with a heightened interest in dreaming and related anomalous or introspective experiences. Moreover, because our synesthesia assessment relied on self-report rather than a behavioral consistency test, endorsement should be interpreted as indicating self-reported synesthesia-like experiences (or a broader synesthetic trait) rather than a definitive behavioral diagnosis of synesthesia. Accordingly, when compared with prevalence estimates for grapheme&#x02013;color synesthesia reported in prior population-based studies (<xref ref-type="bibr" rid="B44">Simner and Hubbard, 2006</xref>; <xref ref-type="bibr" rid="B50">Ward, 2013</xref>), the elevated endorsement rate observed here likely reflects both sample-specific characteristics and the breadth of the self-report methodology (<xref ref-type="bibr" rid="B12">Eagleman et al., 2007</xref>; <xref ref-type="bibr" rid="B35">Rouw and Scholte, 2016</xref>; <xref ref-type="bibr" rid="B52">Ward and Simner, 2022</xref>). As noted in previous work, self-report approaches can capture a broader range of cross-modal experiences, including associations that may not meet strict temporal-consistency criteria (<xref ref-type="bibr" rid="B35">Rouw and Scholte, 2016</xref>; <xref ref-type="bibr" rid="B52">Ward and Simner, 2022</xref>).</p>
<p>Importantly, hierarchical clustering of item-level endorsements reproduced the targeted four-type structure in a manner broadly consistent with earlier self-report clustering studies (<xref ref-type="bibr" rid="B52">Ward and Simner, 2022</xref>), providing evidence that participants&#x00027; responses were systematically structured rather than arbitrary. Nevertheless, this structural replication does not establish temporal consistency at the individual level; therefore, the present conclusions are restricted to associations between self-reported subtype endorsements and LuCiD outcomes. Notably, the relatively similar endorsement rates across the four targeted types (<xref ref-type="fig" rid="F1">Figure 1a</xref>) are consistent with questionnaire-based clustering work: <xref ref-type="bibr" rid="B52">Ward and Simner (2022)</xref> reported broadly comparable cluster prevalences for Language&#x02013;Color (0.684), Personification (0.440), Visualized sensations (0.576), and Sequence-space synesthesia (0.618).</p>
<p>Several additional limitations should be noted. First, both synesthesia and lucid dreaming were assessed retrospectively via self-report; individual differences in introspective tendency, response style, and dream recall could therefore inflate associations between subtype endorsement and LuCiD scores. Second, the cross-sectional design precludes causal inference: synesthesia-like experiences might facilitate lucid dreaming, lucid-dream propensity (or interest in dreaming) might increase synesthesia endorsement, or a third factor (e.g., imagery/absorption or metacognitive monitoring) may contribute to both.</p>
<p>In addition, the variance explained by the regression models was modest (adjusted <italic>R</italic><sup>2</sup> &#x0003D; 0.103 for the LuCiD total score; 0.024&#x02013;0.076 across subscales), indicating small effect sizes and substantial unexplained variability. Moreover, because models (including interaction terms) were selected via an AIC-based stepwise procedure across multiple outcomes, some retained effects may be sample-specific. Accordingly, the present findings should be treated as hypothesis-generating and should be tested in independent samples using preregistered analyses, together with prospective dream measures (e.g., diaries) and behavioral diagnostics of synesthesia.</p>
<p>Furthermore, the internal consistency of the LuCiD subscales should be considered when interpreting the present findings. Although the LuCiD total score showed adequate reliability (Cronbach&#x00027;s &#x003B1; &#x0003D; 0.79), several subscales&#x02014;particularly Realism (&#x003B1; &#x0003D; 0.66), Memory (&#x003B1; &#x0003D; 0.62), and Dissociation (&#x003B1; &#x0003D; 0.69)&#x02014;exhibited relatively lower internal consistency. These subscales&#x02014;Realism, Memory, and Dissociation&#x02014;may be particularly sensitive to state-dependent fluctuations and individual differences in metacognitive access to dream experiences, which could reduce item homogeneity. While these values fall within ranges commonly considered acceptable for psychological research, reduced reliability may increase measurement noise and attenuate effect estimates, especially in regression analyses. Accordingly, the results for these subscales should be interpreted with appropriate caution. Future research should re-examine item wording, assess test&#x02013;retest reliability, and consider confirmatory factor analytic approaches to further refine the measurement properties of the LuCiD in relation to synesthesia research.</p></sec>
<p>Finally, recent phenomenological work suggests that synesthetic experiences may involve dynamically generated sensory&#x02013;affective qualities that are not fully captured by temporal consistency measures alone (<xref ref-type="bibr" rid="B22">Matsuda and Daikoku, 2025</xref>). Future research should therefore combine behavioral diagnostics (e.g., consistency testing) with self-report and phenomenological approaches to clarify which facets of synesthetic experience are most relevant to lucid-dreaming outcomes.</p></sec></sec>
<sec sec-type="conclusions" id="s5">
<label>5</label>
<title>Conclusion</title>
<p>This study demonstrated a close association between synesthesia and lucid dreaming experiences through the concept of counterfactual-richness. Specifically, perceptual synesthesia types (Spatial sequence and Visualized sensation) were shown to significantly enhance various aspects of lucid dreaming, including dream control, insight, and dissociation. In contrast, conceptual synesthesia types (Language-color and OLP) exhibited inhibitory effects, providing novel insights into synesthesia classification and underlying mechanisms.</p>
<p>In this study, a self-report method was used to evaluate synesthesia comprehensively. However, future research should employ standard diagnostic methods (e.g., temporal consistency tests) to identify synesthetes rigorously, and subsequently conduct in-depth qualitative analyses of the relationship between synesthesia and lucid dreaming using smaller samples. In particular, future investigations should focus on Spatial sequence and Visualized sensation synesthesia types, which exhibit pronounced associations with lucid dreaming, and examine how these types qualitatively influence dream content. Such studies would deepen our understanding of the synesthesia-dream relationship and provide a more concrete empirical evaluation of the concept of counterfactual-richness.</p></sec>
</body>
<back>
<sec sec-type="data-availability" id="s6">
<title>Data availability statement</title>
<p>The original contributions presented in the study are included in the article/<xref ref-type="sec" rid="s12">Supplementary material</xref>; further inquiries can be directed to the corresponding author.</p>
</sec>
<sec sec-type="ethics-statement" id="s7">
<title>Ethics statement</title>
<p>The studies involving humans were approved by the Toyo University Ethics Committee (Approval number: P240021). The studies were conducted in accordance with the local legislation and institutional requirements. The participants provided their written informed consent to participate in this study.</p>
</sec>
<sec sec-type="author-contributions" id="s8">
<title>Author contributions</title>
<p>EM (1st author): Writing &#x02013; original draft, Resources, Formal analysis, Project administration, Visualization, Data curation, Validation, Conceptualization, Software, Funding acquisition, Methodology. EM (2nd author): Writing &#x02013; review &#x00026; editing, Software, Methodology, Funding acquisition, Conceptualization, Data curation, Resources, Formal analysis, Validation, Project administration.</p>
</sec>
<ack><title>Acknowledgments</title><p>The authors would like to thank all participants who generously contributed their valuable time and effort to this study. We also appreciate the support and cooperation of our colleagues, research assistants, and students who assisted with data collection and analysis.</p>
</ack>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="ai-statement" id="s10">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was not used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p></sec>
<sec sec-type="disclaimer" id="s11">
<title>Publisher&#x00027;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec sec-type="supplementary-material" id="s12">
<title>Supplementary material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fpsyg.2026.1733841/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fpsyg.2026.1733841/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Data_Sheet_1.pdf" id="SM1" mimetype="application/pdf" xmlns:xlink="http://www.w3.org/1999/xlink"/></sec>
<ref-list>
<title>References</title>
<ref id="B1">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Asano</surname> <given-names>M.</given-names></name> <name><surname>Yokosawa</surname> <given-names>K.</given-names></name></person-group> (<year>2012</year>). <article-title>Synesthetic colors for Japanese late acquired graphemes</article-title>. <source>Conscious. Cogn</source>. <volume>21</volume>, <fpage>983</fpage>&#x02013;<lpage>993</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.concog.2012.02.005</pub-id><pub-id pub-id-type="pmid">22418269</pub-id></mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Baron-Cohen</surname> <given-names>S.</given-names></name></person-group> (<year>1996</year>). <article-title>Is there a normal phase of synaesthesia in development?</article-title> <source>Psyche</source> <volume>2</volume>, <fpage>2</fpage>&#x02013;<lpage>27</lpage>.</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Barrett</surname> <given-names>D.</given-names></name></person-group> (<year>2017</year>). <article-title>Dreams and creative problem-solving</article-title>. <source>Ann. N. Y. Acad. Sci</source>. <volume>1406</volume>, <fpage>64</fpage>&#x02013;<lpage>67</lpage>. doi: <pub-id pub-id-type="doi">10.1111/nyas.13412</pub-id><pub-id pub-id-type="pmid">28640937</pub-id></mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Chiou</surname> <given-names>R.</given-names></name> <name><surname>Rich</surname> <given-names>A. N.</given-names></name></person-group> (<year>2014</year>). <article-title>The role of conceptual knowledge in understanding synesthesia: evaluating contemporary findings from a &#x0201C;hub-and-spokes&#x0201D; perspective</article-title>. <source>Front. Psychol</source>. <volume>5</volume>:<fpage>105</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2014.00105</pub-id></mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Chiou</surname> <given-names>R.</given-names></name> <name><surname>Stelter</surname> <given-names>M.</given-names></name> <name><surname>Rich</surname> <given-names>A. N.</given-names></name></person-group> (<year>2013</year>). <article-title>Beyond colour perception: auditory-visual synaesthesia induces experiences of geometric objects in specific locations</article-title>. <source>Cortex</source> <volume>49</volume>, <fpage>1750</fpage>&#x02013;<lpage>1763</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cortex.2012.04.006</pub-id><pub-id pub-id-type="pmid">22673231</pub-id></mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Denkova</surname> <given-names>E.</given-names></name> <name><surname>Dolcos</surname> <given-names>S.</given-names></name> <name><surname>Dolcos</surname> <given-names>F.</given-names></name></person-group> (<year>2012</year>). <article-title>Reliving emotional personal memories: affective biases linked to personality and sex-related differences</article-title>. <source>Emotion</source> <volume>12</volume>, <fpage>515</fpage>&#x02013;<lpage>528</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0026809</pub-id><pub-id pub-id-type="pmid">22251043</pub-id></mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Deroy</surname> <given-names>O.</given-names></name> <name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2013</year>). <article-title>Why we are not all synesthetes (not even weakly so)</article-title>. <source>Psychon. Bull. Rev</source>. <volume>20</volume>, <fpage>643</fpage>&#x02013;<lpage>664</lpage>. doi: <pub-id pub-id-type="doi">10.3758/s13423-013-0387-2</pub-id><pub-id pub-id-type="pmid">23413012</pub-id></mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Dixon</surname> <given-names>M. J.</given-names></name> <name><surname>Smilek</surname> <given-names>D.</given-names></name> <name><surname>Merikle</surname> <given-names>P.</given-names></name></person-group> (<year>2004</year>). <article-title>Not all synaesthetes are created equal: projector versus associator synaesthetes</article-title>. <source>Cogn. Affect. Behav. Neurosci</source>. <volume>4</volume>, <fpage>335</fpage>&#x02013;<lpage>343</lpage>. doi: <pub-id pub-id-type="doi">10.3758/CABN.4.3.335</pub-id><pub-id pub-id-type="pmid">15535169</pub-id></mixed-citation>
</ref>
<ref id="B9">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Dolcos</surname> <given-names>F.</given-names></name> <name><surname>Iordan</surname> <given-names>A. D.</given-names></name> <name><surname>Dolcos</surname> <given-names>S.</given-names></name></person-group> (<year>2011</year>). <article-title>Neural correlates of emotion-cognition interactions: a review of evidence from brain imaging investigations</article-title>. <source>J. Cogn. Psychol</source>. <volume>23</volume>, <fpage>669</fpage>&#x02013;<lpage>694</lpage>. doi: <pub-id pub-id-type="doi">10.1080/20445911.2011.594433</pub-id></mixed-citation>
</ref>
<ref id="B10">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Domhoff</surname> <given-names>G. W.</given-names></name></person-group> (<year>2017</year>). <article-title>The invasion of the concept snatchers: the origins, distortions, and future of the continuity hypothesis</article-title>. <source>Dreaming</source> <volume>27</volume>, <fpage>14</fpage>&#x02013;<lpage>39</lpage>. doi: <pub-id pub-id-type="doi">10.1037/drm0000047</pub-id></mixed-citation>
</ref>
<ref id="B11">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Eagleman</surname> <given-names>D. M.</given-names></name></person-group> (<year>2009</year>). <article-title>The objectification of overlearned sequences: a new view of spatial sequence synesthesia</article-title>. <source>Cortex</source> <volume>45</volume>, <fpage>1266</fpage>&#x02013;<lpage>1277</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cortex.2009.06.012</pub-id><pub-id pub-id-type="pmid">19665114</pub-id></mixed-citation>
</ref>
<ref id="B12">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Eagleman</surname> <given-names>D. M.</given-names></name> <name><surname>Kagan</surname> <given-names>A. D.</given-names></name> <name><surname>Nelson</surname> <given-names>S. S.</given-names></name> <name><surname>Sagaram</surname> <given-names>D.</given-names></name> <name><surname>Sarma</surname> <given-names>A. K.</given-names></name></person-group> (<year>2007</year>). <article-title>A standardized test battery for the study of synesthesia</article-title>. <source>J. Neurosci. Methods</source> <volume>159</volume>, <fpage>139</fpage>&#x02013;<lpage>145</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jneumeth.2006.07.012</pub-id><pub-id pub-id-type="pmid">16919755</pub-id></mixed-citation>
</ref>
<ref id="B13">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Gould</surname> <given-names>C.</given-names></name> <name><surname>Froese</surname> <given-names>T.</given-names></name> <name><surname>Barrett</surname> <given-names>A. B.</given-names></name> <name><surname>Ward</surname> <given-names>J.</given-names></name> <name><surname>Seth</surname> <given-names>A. K.</given-names></name></person-group> (<year>2014</year>). <article-title>An extended case study on the phenomenology of sequence-space synesthesia</article-title>. <source>Front. Hum. Neurosci</source>. <volume>8</volume>:<fpage>433</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnhum.2014.00433</pub-id><pub-id pub-id-type="pmid">25071498</pub-id></mixed-citation>
</ref>
<ref id="B14">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Haas</surname> <given-names>B. W.</given-names></name> <name><surname>Canli</surname> <given-names>T.</given-names></name></person-group> (<year>2008</year>). <article-title>Emotional memory function, personality structure and psychopathology: a neural system approach to the identification of vulnerability markers</article-title>. <source>Brain Res. Rev</source>. <volume>58</volume>, <fpage>71</fpage>&#x02013;<lpage>84</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.brainresrev.2007.10.014</pub-id><pub-id pub-id-type="pmid">18359090</pub-id></mixed-citation>
</ref>
<ref id="B15">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Hess</surname> <given-names>G.</given-names></name> <name><surname>Schredl</surname> <given-names>M.</given-names></name> <name><surname>G&#x000F6;ritz</surname> <given-names>A. S.</given-names></name></person-group> (<year>2017</year>). <article-title>Lucid dreaming frequency and the big five personality factors</article-title>. <source>Imagin. Cogn. Pers</source>. <volume>36</volume>, <fpage>240</fpage>&#x02013;<lpage>253</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0276236616648653</pub-id></mixed-citation>
</ref>
<ref id="B16">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ikegami</surname> <given-names>T.</given-names></name> <name><surname>Zlatev</surname> <given-names>J.</given-names></name></person-group> (<year>2008</year>). <article-title>From pre-representational cognition to language</article-title>. <source>Phenomenol. Cogn. Sci</source>. <volume>7</volume>, <fpage>213</fpage>&#x02013;<lpage>235</lpage>. doi: <pub-id pub-id-type="doi">10.1515/9783110207507.2.197</pub-id></mixed-citation>
</ref>
<ref id="B17">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Itoh</surname> <given-names>K.</given-names></name> <name><surname>Sakata</surname> <given-names>H.</given-names></name> <name><surname>Igarashi</surname> <given-names>H.</given-names></name> <name><surname>Nakada</surname> <given-names>T.</given-names></name></person-group> (<year>2019</year>). <article-title>Automaticity of pitch class-color synesthesia as revealed by a stroop-like effect</article-title>. <source>Conscious. Cogn</source>. <volume>71</volume>, <fpage>86</fpage>&#x02013;<lpage>91</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.concog.2019.04.001</pub-id><pub-id pub-id-type="pmid">30978617</pub-id></mixed-citation>
</ref>
<ref id="B18">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Khallieva</surname> <given-names>V.</given-names></name> <name><surname>Sinke</surname> <given-names>C.</given-names></name> <name><surname>Zedler</surname> <given-names>M.</given-names></name> <name><surname>Worthmann</surname> <given-names>H.</given-names></name> <name><surname>Bleich</surname> <given-names>S.</given-names></name> <name><surname>Szycik</surname> <given-names>G. R.</given-names></name></person-group> (<year>2022</year>). <article-title>Dreaming and lucidity in synesthesia</article-title>. <source>Dreaming</source> <volume>32</volume>, <fpage>206</fpage>&#x02013;<lpage>220</lpage>. doi: <pub-id pub-id-type="doi">10.1037/drm0000190</pub-id></mixed-citation>
</ref>
<ref id="B19">
<mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>K&#x000F6;hler</surname> <given-names>W.</given-names></name></person-group> (<year>1929</year>). <source>Gestalt Psychology</source>. <publisher-loc>New York, NY</publisher-loc>: <publisher-name>Liveright</publisher-name>.</mixed-citation>
</ref>
<ref id="B20">
<mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Laberge</surname> <given-names>S.</given-names></name></person-group> (<year>1985</year>). <source>Lucid Dreaming</source>. <publisher-loc>Los Angeles, CA</publisher-loc>: <publisher-name>Jeremy P. Tarcher</publisher-name>.</mixed-citation>
</ref>
<ref id="B21">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>MacLeod</surname> <given-names>C. M.</given-names></name></person-group> (<year>1991</year>). <article-title>Half a century of research on the stroop effect: an integrative review</article-title>. <source>Psychol. Bull</source>. <volume>109</volume>, <fpage>163</fpage>&#x02013;<lpage>203</lpage>. doi: <pub-id pub-id-type="doi">10.1037/0033-2909.109.2.163</pub-id><pub-id pub-id-type="pmid">2034749</pub-id></mixed-citation>
</ref>
<ref id="B22">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Matsuda</surname> <given-names>E.</given-names></name> <name><surname>Daikoku</surname> <given-names>T.</given-names></name></person-group> (<year>2025</year>). <article-title>Phenomenological analysis of chromaesthesia as pre-representational dynamic sense-making</article-title>. <source>PsyArXiv</source>. [Preprint].</mixed-citation>
</ref>
<ref id="B23">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Meier</surname> <given-names>B.</given-names></name> <name><surname>Rothen</surname> <given-names>N.</given-names></name> <name><surname>Walter</surname> <given-names>S.</given-names></name></person-group> (<year>2014</year>). <article-title>Developmental aspects of synaesthesia across the adult lifespan</article-title>. <source>Front. Hum. Neurosci</source>. <volume>8</volume>, <fpage>1</fpage>&#x02013;<lpage>12</lpage>. doi: <pub-id pub-id-type="doi">10.3389/fnhum.2014.00129</pub-id><pub-id pub-id-type="pmid">24653689</pub-id></mixed-citation>
</ref>
<ref id="B24">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Mondloch</surname> <given-names>C. J.</given-names></name> <name><surname>Maurer</surname> <given-names>D.</given-names></name></person-group> (<year>2004</year>). <article-title>Do small white balls squeak? pitch-object correspondences in young children</article-title>. <source>Cogn. Affect. Behav. Neurosci</source>. <volume>4</volume>, <fpage>133</fpage>&#x02013;<lpage>136</lpage>. doi: <pub-id pub-id-type="doi">10.3758/CABN.4.2.133</pub-id><pub-id pub-id-type="pmid">15460920</pub-id></mixed-citation>
</ref>
<ref id="B25">
<mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>No&#x000EB;</surname> <given-names>A.</given-names></name></person-group> (<year>2004</year>). <source>Action in Perception</source>. <publisher-loc>Cambridge, MA</publisher-loc>: <publisher-name>MIT Press</publisher-name>.</mixed-citation>
</ref>
<ref id="B26">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>No&#x000EB;</surname> <given-names>A.</given-names></name></person-group> (<year>2006</year>). <article-title>&#x0201C;Experience without the head,&#x0201D;</article-title> in <source>Perceptual Experience</source>, ed T. Metzinger (New York, NY: Clarendon/Oxford University Press), <fpage>411</fpage>&#x02013;<lpage>434</lpage>. doi: <pub-id pub-id-type="doi">10.1093/acprof:oso/9780199289769.003.0012</pub-id></mixed-citation>
</ref>
<ref id="B27">
<mixed-citation publication-type="journal"><collab>O&#x00027;Regan J. K. and No&#x000EB;, A..</collab> (<year>2001</year>). <article-title>A sensorimotor account of vision and visual consciousness</article-title>. <source>Behav. Brain Sci</source>. <volume>24</volume>, <fpage>939</fpage>&#x02013;<lpage>973</lpage>. doi: <pub-id pub-id-type="doi">10.1017/S0140525X01000115</pub-id></mixed-citation>
</ref>
<ref id="B28">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Oshio</surname> <given-names>A.</given-names></name> <name><surname>Abe</surname> <given-names>S.</given-names></name> <name><surname>Cutrone</surname> <given-names>P.</given-names></name></person-group> (<year>2012</year>). <article-title>Development, reliability, and validity of the Japanese version of ten item personality inventory (TIPI-J)</article-title>. <source>Jpn. J. Pers</source>. <volume>21</volume>, <fpage>40</fpage>&#x02013;<lpage>52</lpage>. doi: <pub-id pub-id-type="doi">10.2132/personality.21.40</pub-id></mixed-citation>
</ref>
<ref id="B29">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pfeifer</surname> <given-names>G.</given-names></name> <name><surname>Chan</surname> <given-names>J. W. D.</given-names></name> <name><surname>Sigala</surname> <given-names>N.</given-names></name></person-group> (<year>2017</year>). <article-title>Representational account of memory: insights from aging and synesthesia</article-title>. <source>J. Cogn. Neurosci</source>. <volume>28</volume>, <fpage>1987</fpage>&#x02013;<lpage>2002</lpage>. doi: <pub-id pub-id-type="doi">10.1162/jocn_a_01014</pub-id><pub-id pub-id-type="pmid">27458751</pub-id></mixed-citation>
</ref>
<ref id="B30">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Price</surname> <given-names>M. C.</given-names></name></person-group> (<year>2014</year>). <article-title>Insights from introspection: a commentary on Gould et al. (2014). &#x0201C;An extended case study on the phenomenology of spatial form synaesthesia&#x0201D;</article-title>. <source>Front. Human Neurosci.</source> <volume>8</volume>:<fpage>439</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnhum.2014.00439</pub-id><pub-id pub-id-type="pmid">25071500</pub-id></mixed-citation>
</ref>
<ref id="B31">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ramachandran</surname> <given-names>V. S.</given-names></name> <name><surname>Hubbard</surname> <given-names>E. M.</given-names></name></person-group> (<year>2001</year>). <article-title>Synaesthesia: a window into perception, thought and language</article-title>. <source>J. Conscious. Stud.</source>, <volume>8</volume>, <fpage>3</fpage>&#x02013;<lpage>34</lpage>.</mixed-citation>
</ref>
<ref id="B32">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ramachandran</surname> <given-names>V. S.</given-names></name> <name><surname>Hubbard</surname> <given-names>E. M.</given-names></name></person-group> (<year>2003</year>). <article-title>The phenomenology of synaesthesia</article-title>. <source>J. Conscious. Stud</source>. <volume>10</volume>, <fpage>49</fpage>&#x02013;<lpage>57</lpage>.</mixed-citation>
</ref>
<ref id="B33">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Rasmussen</surname> <given-names>A. S.</given-names></name> <name><surname>Berntsen</surname> <given-names>D.</given-names></name></person-group> (<year>2010</year>). <article-title>Personality traits and autobiographical memory: openness is positively related to the experience and usage of recollections</article-title>. <source>Memory</source> <volume>18</volume>, <fpage>774</fpage>&#x02013;<lpage>786</lpage>. doi: <pub-id pub-id-type="doi">10.1080/09658211.2010.514270</pub-id><pub-id pub-id-type="pmid">20924950</pub-id></mixed-citation>
</ref>
<ref id="B34">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Rich</surname> <given-names>A.</given-names></name> <name><surname>Bradshaw</surname> <given-names>J.</given-names></name> <name><surname>Mattingley</surname> <given-names>J.</given-names></name></person-group> (<year>2005</year>). <article-title>A systematic, large-scale study of synaesthesia: implications for the role of early experience in lexical-colour associations</article-title>. <source>Cognition</source> <volume>98</volume>, <fpage>53</fpage>&#x02013;<lpage>84</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cognition.2004.11.003</pub-id><pub-id pub-id-type="pmid">16297676</pub-id></mixed-citation>
</ref>
<ref id="B35">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Rouw</surname> <given-names>R.</given-names></name> <name><surname>Scholte</surname> <given-names>H. S.</given-names></name></person-group> (<year>2016</year>). <article-title>Personality and cognitive profiles of a general synesthetic trait</article-title>. <source>Neuropsychologia</source> <volume>88</volume>, <fpage>35</fpage>&#x02013;<lpage>48</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuropsychologia.2016.01.006</pub-id><pub-id pub-id-type="pmid">26772146</pub-id></mixed-citation>
</ref>
<ref id="B36">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Schredl</surname> <given-names>M.</given-names></name></person-group> (<year>2003</year>). <article-title>Continuity between waking and dreaming: a proposal for a mathematical model</article-title>. <source>Sleep Hypnosis</source>. <volume>5</volume>, <fpage>26</fpage>&#x02013;<lpage>39</lpage>. doi: <pub-id pub-id-type="doi">10.1016/s1053-8100(02)00072-7</pub-id></mixed-citation>
</ref>
<ref id="B37">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Schredl</surname> <given-names>M.</given-names></name> <name><surname>Henley-Einion</surname> <given-names>J.</given-names></name> <name><surname>Blagrove</surname> <given-names>M.</given-names></name></person-group> (<year>2016</year>). <article-title>Lucid dreaming and personality in children/adolescents and adults: the UK Library Study</article-title>. <source>Int. J. Dream Res</source>. <volume>9</volume>, <fpage>75</fpage>&#x02013;<lpage>78</lpage>. doi: <pub-id pub-id-type="doi">10.11588/ijodr.2016.1.26454</pub-id></mixed-citation>
</ref>
<ref id="B38">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Seth</surname> <given-names>A. K.</given-names></name></person-group> (<year>2014</year>). <article-title>A predictive processing theory of sensorimotor contingencies: explaining the puzzle of perceptual presence and its absence in synesthesia</article-title>. <source>Cogn. Neurosci</source>. <volume>5</volume>, <fpage>97</fpage>&#x02013;<lpage>118</lpage>. doi: <pub-id pub-id-type="doi">10.1080/17588928.2013.877880</pub-id><pub-id pub-id-type="pmid">24446823</pub-id></mixed-citation>
</ref>
<ref id="B39">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Shi</surname> <given-names>J.</given-names></name> <name><surname>Matsuda</surname> <given-names>E.</given-names></name></person-group> (<year>2024</year>). <article-title>Recall of lucid dreaming and personality traits: analysis of the big five and stress coping style among Chinese university students</article-title>. <source>Jpn. J. Pers</source>. <volume>33</volume>, <fpage>106</fpage>&#x02013;<lpage>108</lpage>. doi: <pub-id pub-id-type="doi">10.2132/personality.33.2.6</pub-id></mixed-citation>
</ref>
<ref id="B40">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Simner</surname> <given-names>J.</given-names></name></person-group> (<year>2012</year>). <article-title>Defining synaesthesia</article-title>. <source>Br. J. Psychol</source>. <volume>103</volume>, <fpage>1</fpage>&#x02013;<lpage>15</lpage>. doi: <pub-id pub-id-type="doi">10.1348/000712610X528305</pub-id><pub-id pub-id-type="pmid">22229768</pub-id></mixed-citation>
</ref>
<ref id="B41">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Simner</surname> <given-names>J.</given-names></name> <name><surname>Bain</surname> <given-names>A. E.</given-names></name></person-group> (<year>2013</year>). <article-title>A longitudinal study of grapheme-color synesthesia in childhood: 6/7 years to 10/11 years</article-title>. <source>Front. Hum. Neurosci</source>. <volume>7</volume>, <fpage>1</fpage>&#x02013;<lpage>9</lpage>. doi: <pub-id pub-id-type="doi">10.3389/fnhum.2013.00603</pub-id><pub-id pub-id-type="pmid">24312035</pub-id></mixed-citation>
</ref>
<ref id="B42">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Simner</surname> <given-names>J.</given-names></name> <name><surname>Harrold</surname> <given-names>J.</given-names></name> <name><surname>Creed</surname> <given-names>H.</given-names></name> <name><surname>Monro</surname> <given-names>L.</given-names></name> <name><surname>Foulkes</surname> <given-names>L.</given-names></name></person-group> (<year>2009</year>). <article-title>Early detection of markers for synaesthesia in childhood populations</article-title>. <source>Brain</source> <volume>132</volume>, <fpage>57</fpage>&#x02013;<lpage>64</lpage>. doi: <pub-id pub-id-type="doi">10.1093/brain/awn292</pub-id><pub-id pub-id-type="pmid">19015159</pub-id></mixed-citation>
</ref>
<ref id="B43">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Simner</surname> <given-names>J.</given-names></name> <name><surname>Holenstein</surname> <given-names>E.</given-names></name></person-group> (<year>2007</year>). <article-title>Ordinal linguistic personification as a variant of synesthesia</article-title>. <source>J. Cogn. Neurosci</source>. <volume>19</volume>, <fpage>694</fpage>&#x02013;<lpage>703</lpage>. doi: <pub-id pub-id-type="doi">10.1162/jocn.2007.19.4.694</pub-id><pub-id pub-id-type="pmid">17381259</pub-id></mixed-citation>
</ref>
<ref id="B44">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Simner</surname> <given-names>J.</given-names></name> <name><surname>Hubbard</surname> <given-names>E. M.</given-names></name></person-group> (<year>2006</year>). <article-title>Variants of synesthesia interact in cognitive tasks: evidence for implicit associations and late connectivity in cross-talk theories</article-title>. <source>Neuroscience</source> <volume>143</volume>, <fpage>805</fpage>&#x02013;<lpage>814</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuroscience.2006.08.018</pub-id><pub-id pub-id-type="pmid">16996695</pub-id></mixed-citation>
</ref>
<ref id="B45">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Simner</surname> <given-names>J.</given-names></name> <name><surname>Ipser</surname> <given-names>A.</given-names></name> <name><surname>Smees</surname> <given-names>R.</given-names></name> <name><surname>Alvarez</surname> <given-names>J.</given-names></name></person-group> (<year>2017</year>). <article-title>Does synaesthesia age? Changes in the quality and consistency of synaesthetic associations</article-title>. <source>Neuropsychologia</source> <volume>106</volume>, <fpage>407</fpage>&#x02013;<lpage>416</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuropsychologia.2017.09.013</pub-id><pub-id pub-id-type="pmid">28919244</pub-id></mixed-citation>
</ref>
<ref id="B46">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Stickgold</surname> <given-names>R.</given-names></name> <name><surname>Malia</surname> <given-names>A.</given-names></name> <name><surname>Maguire</surname> <given-names>D.</given-names></name> <name><surname>Roddenberry</surname> <given-names>D.</given-names></name> <name><surname>O&#x00027;Connor</surname> <given-names>M.</given-names></name></person-group> (<year>2000</year>). <article-title>Replaying the game: hypnagogic images in normals and amnesics</article-title>. <source>Science</source> <volume>290</volume>, <fpage>350</fpage>&#x02013;<lpage>353</lpage>. doi: <pub-id pub-id-type="doi">10.1126/science.290.5490.350</pub-id><pub-id pub-id-type="pmid">11030656</pub-id></mixed-citation>
</ref>
<ref id="B47">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Stroop</surname> <given-names>J. R.</given-names></name></person-group> (<year>1935</year>). <article-title>Studies of interference in serial verbal reactions</article-title>. <source>J. Exp. Psychol</source>. <volume>18</volume>, <fpage>643</fpage>&#x02013;<lpage>662</lpage>. doi: <pub-id pub-id-type="doi">10.1037/h0054651</pub-id></mixed-citation>
</ref>
<ref id="B48">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Voss</surname> <given-names>U.</given-names></name> <name><surname>Holzmann</surname> <given-names>R.</given-names></name> <name><surname>Tuin</surname> <given-names>I.</given-names></name> <name><surname>Hobson</surname> <given-names>J. A.</given-names></name></person-group> (<year>2009</year>). <article-title>Lucid dreaming: a state of consciousness with features of both waking and non-lucid dreaming</article-title>. <source>Sleep</source> <volume>32</volume>, <fpage>1191</fpage>&#x02013;<lpage>1200</lpage>. doi: <pub-id pub-id-type="doi">10.1093/sleep/32.9.1191</pub-id><pub-id pub-id-type="pmid">19750924</pub-id></mixed-citation>
</ref>
<ref id="B49">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Voss</surname> <given-names>U.</given-names></name> <name><surname>Schermelleh-Engel</surname> <given-names>K.</given-names></name> <name><surname>Windt</surname> <given-names>J.</given-names></name> <name><surname>Frenzel</surname> <given-names>C.</given-names></name> <name><surname>Hobson</surname> <given-names>A.</given-names></name></person-group> (<year>2013</year>). <article-title>Measuring consciousness in dreams: the lucidity and consciousness in dreams scale</article-title>. <source>Conscious. Cogn</source>. <volume>22</volume>, <fpage>8</fpage>&#x02013;<lpage>21</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.concog.2012.11.001</pub-id><pub-id pub-id-type="pmid">23220345</pub-id></mixed-citation>
</ref>
<ref id="B50">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ward</surname> <given-names>J.</given-names></name></person-group> (<year>2013</year>). <article-title>Synesthesia</article-title>. <source>Annu. Rev. Psychol</source>. <volume>64</volume>, <fpage>49</fpage>&#x02013;<lpage>75</lpage>. doi: <pub-id pub-id-type="doi">10.1146/annurev-psych-113011-143840</pub-id><pub-id pub-id-type="pmid">22747246</pub-id></mixed-citation>
</ref>
<ref id="B51">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ward</surname> <given-names>J.</given-names></name> <name><surname>Huckstep</surname> <given-names>B.</given-names></name> <name><surname>Tsakanikos</surname> <given-names>E.</given-names></name></person-group> (<year>2006</year>). <article-title>Sound-colour synaesthesia: to what extent does it use cross-modal mechanisms common to us all?</article-title> <source>Cortex</source> <volume>42</volume>, <fpage>264</fpage>&#x02013;<lpage>280</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S0010-9452(08)70352-6</pub-id><pub-id pub-id-type="pmid">16683501</pub-id></mixed-citation>
</ref>
<ref id="B52">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ward</surname> <given-names>J.</given-names></name> <name><surname>Simner</surname> <given-names>J.</given-names></name></person-group> (<year>2022</year>). <article-title>How do different types of synesthesia cluster together? Implications for causal mechanisms</article-title>. <source>Perception</source> <volume>51</volume>, <fpage>91</fpage>&#x02013;<lpage>113</lpage>. doi: <pub-id pub-id-type="doi">10.1177/03010066211070761</pub-id><pub-id pub-id-type="pmid">35040670</pub-id></mixed-citation>
</ref>
<ref id="B53">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Witthoft</surname> <given-names>N.</given-names></name> <name><surname>Winawer</surname> <given-names>J.</given-names></name></person-group> (<year>2006</year>). <article-title>Synesthetic colors determined by having colored refrigerator magnets in childhood</article-title>. <source>Cortex</source> <volume>42</volume>, <fpage>175</fpage>&#x02013;<lpage>183</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S0010-9452(08)70342-3</pub-id><pub-id pub-id-type="pmid">16683491</pub-id></mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn fn-type="custom" custom-type="edited-by" id="fn0001">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/95434/overview">Luca Simione</ext-link>, UNINT - Universit&#x000E0; degli studi Internazionali di Roma, Italy</p>
</fn>
<fn fn-type="custom" custom-type="reviewed-by" id="fn0002">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1550685/overview">Jennifer Bruder</ext-link>, Carnegie Mellon University in Qatar, Qatar</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2995231/overview">Ilde Pieroni</ext-link>, Sapienza University of Rome, Italy</p>
</fn>
</fn-group>
</back>
</article>