<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article article-type="review-article" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" dtd-version="1.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Comput. Sci.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Computer Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Comput. Sci.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">2624-9898</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fcomp.2025.1652537</article-id><article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading"><subject>Conceptual Analysis</subject></subj-group>
</article-categories>
<title-group>
<article-title>An integrative cognitive model for multisensory design: benefits and risks of AI-personalization</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Bartoletti</surname> <given-names>Roxane L.</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="author-notes" rid="fn0003"><sup>&#x2020;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2871383"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Corveleyn</surname> <given-names>Xavier</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<xref ref-type="author-notes" rid="fn0004"><sup>&#x2020;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/173036"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x0026; editing</role>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>APSY<sup>-V</sup>, N&#x00EE;mes Universit&#x00E9;</institution>, <city>N&#x00EE;mes</city>, <country country="FR">France</country></aff>
<aff id="aff2"><sup>2</sup><institution>LAPCOS, Universit&#x00E9; C&#x00F4;te d&#x2019;Azur</institution>, <city>Nice</city>, <country country="FR">France</country></aff>
<author-notes><corresp id="c001"><sup>&#x002A;</sup>Correspondence: Xavier Corveleyn, <email xlink:href="mailto:xavier.corveleyn@univ-cotedazur.fr">xavier.corveleyn@univ-cotedazur.fr</email></corresp>
<fn fn-type="other" id="fn0003"><p><sup>&#x2020;</sup>ORCID: Roxane L. Bartoletti, <ext-link xlink:href="http://orcid.org/0000-0001-7570-7084" ext-link-type="uri">orcid.org/0000-0001-7570-7084</ext-link></p></fn>
<fn fn-type="other" id="fn0004"><p>Xavier Corveleyn, <ext-link ext-link-type="uri" xlink:href="http://orcid.org/0000-0001-5181-0597">orcid.org/0000-0001-5181-0597</ext-link></p></fn>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2025-10-17">
<day>17</day>
<month>10</month>
<year>2025</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2025</year>
</pub-date>
<volume>7</volume>
<elocation-id>1652537</elocation-id>
<history>
<date date-type="received">
<day>23</day>
<month>06</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>02</day>
<month>10</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2025 Bartoletti and Corveleyn.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Bartoletti and Corveleyn</copyright-holder>
<license><ali:license_ref start_date="2025-10-17">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>In this paper, we examine the interplay between multisensory environments, cognitive performance, and artificial intelligence (AI)-enabled personalization. We propose an integrative cognitive model to better understand how the personalization of the sensory environment influences behavior, emotion, and cognition, drawing upon the Cognitive Capacity Hypothesis, Load Theory, Distraction-Conflict Theory, and the Strength and Vulnerability Integration model. Our integrative model delineates how the characteristics of the individual, the task, and the sensory stimuli interact through arousal modulation. Based on recent conceptual and empirical studies, this model proposes that (1) optimal arousal could improve distractor inhibition and task-focusing, (2) metacognitive misjudgments could lead individuals to select suboptimal sensory environments, and (3) aging alters sensory processing efficiency, necessitating tailored approaches. Within this theoretical proposition, we argue that sensory stimuli modulate arousal and available cognitive capacity, thereby influencing cognitive performance. Thus, when expanding to AI, personalized uni- and multisensory environments could demonstrate both benefits (e.g., enhanced attentional states, therapeutic applications) and risks (e.g., privacy erosion, metacognitive biases). Empirical evidence suggests that preferred background music can reduce mind-wandering, while olfactory stimuli, though underutilized in Western societies, hold untapped potential due to their strong links to memory and emotion. Whereas AI-personalized sensory environments open new perspectives into user experiences and therapeutic approaches (e.g., VR, music therapy, multisensory environment), they raise ethical concerns as the use of algorithms may polarize preferences and exploit behavioral data. Future research should address ethical AI design while leveraging cross-modal correspondences to enhance cognitive, emotional, and behavioral outcomes. 
Overall, this integrative model proposes an integrative framework by gathering all essential elements for creating a meaningful and coherent multisensory environment, which could be applied to researchers, artists, or marketers.</p>
</abstract>
<kwd-group>
<kwd>multisensory experiences</kwd>
<kwd>personalization</kwd>
<kwd>cognitive performances</kwd>
<kwd>behavior</kwd>
<kwd>artificial intelligence</kwd>
</kwd-group><funding-group><award-group id="gs1"><funding-source id="sp1"><institution-wrap><institution>Universit&#x00E9; C&#x00F4;te d&#x2019;Azur JEDI</institution></institution-wrap></funding-source><award-id rid="sp1">APP2020</award-id></award-group><funding-statement>The author(s) declare that financial support was received for the research and/or publication of this article. This work was supported by the Laboratoire d&#x2019;Anthropologie et de Psychologie Cliniques, Cognitives et Sociales (LAPCOS) of Universit&#x00E9; C&#x00F4;te d&#x2019;Azur (UCA, France). This project also received a grant (APP2020) by the graduate school and research of health sciences ecosystems - Universit&#x00E9; C&#x00F4;te d&#x2019;Azur JEDI (ANR-15-IDEX-01- XRC2 Idex Project).</funding-statement></funding-group><counts>
<fig-count count="2"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="137"/>
<page-count count="13"/>
<word-count count="12744"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Human-Media Interaction</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="sec1">
<label>1</label>
<title>Introduction</title>
<p>From the smells and sounds of morning breakfast to the softness of bed sheets and the melodies of music broadcast before sleeping, we are constantly immersed in multisensory environments. These environments are composed of countless sensory stimuli, whether voluntarily or involuntarily generated. Since each perceived stimulus requires cognitive processing, all multisensory settings inevitably shape our behavior and cognition. Reflecting the predominance of visual sensitivity in Western societies, studies investigating the influences of multisensory environments on human behavior and cognition claim the need to open the exploration of our other senses. Researchers should expand beyond sight and hearing to include smell, taste, and touch, fostering the development of tools and spaces that support social, cognitive, emotional, and behavioral growth (<xref ref-type="bibr" rid="ref5">Baines, 2008</xref>; <xref ref-type="bibr" rid="ref110">Spence, 2020b</xref>, <xref ref-type="bibr" rid="ref111">2020c</xref>, <xref ref-type="bibr" rid="ref114">2022</xref>; <xref ref-type="bibr" rid="ref126">Vi et al., 2017</xref>). However, as <xref ref-type="bibr" rid="ref32">Fulkerson (2020)</xref> argues, multisensory experiences are not merely the sum of individual sensory inputs but rather &#x201C;the result of slightly more complex combinations of different sensory systems&#x201D; (<xref ref-type="bibr" rid="ref98">Sathian and Ramachandran, 2020</xref>, p. 54).</p>
<p>These intricate sensory interactions can be observed in both experimental studies and everyday life, from the visual influence of food coloring on taste perception (<xref ref-type="bibr" rid="ref115">Spence et al., 2010</xref>), the deformation of phonemes by the impact of automatic lip-reading (<xref ref-type="bibr" rid="ref70">McGurk and MacDonald, 1976</xref>), the influence of tactile, proprioceptive and visual stimuli on the feeling of a dummy limb embodiment (<xref ref-type="bibr" rid="ref8">Bartoletti et al., 2023</xref>; <xref ref-type="bibr" rid="ref17">Chancel and Ehrsson, 2023</xref>), or the correspondence between the shape or movement of an object or a sound (<xref ref-type="bibr" rid="ref24">Corveleyn et al., 2012</xref>; <xref ref-type="bibr" rid="ref40">Haggard and Cole, 2007</xref>; <xref ref-type="bibr" rid="ref93">Ramachandran and Hubbard, 2001</xref>). Multisensory environments are not always involuntarily generated, as they can be deliberately designed to evoke specific impressions in individuals and groups (<xref ref-type="bibr" rid="ref83">Parker et al., 2024</xref>; <xref ref-type="bibr" rid="ref125">Velasco and Obrist, 2020</xref>). For example, the Tate Sensorium installation offered visitors a multisensory experience in which tactile, auditory, gustatory, visual, and olfactory sensibilities were brought to bear on the exploration of the paintings on display (<xref ref-type="bibr" rid="ref79">Obrist et al., 2017</xref>; <xref ref-type="bibr" rid="ref91">Pursey and Lomas, 2018</xref>). In earlier times, scented concerts were among the first spaces to blend multiple senses. If scents were originally diffused to camouflage the smell of crowds in concert halls, the joint diffusion of scents and music has gradually become a strategy to attract spectators (<xref ref-type="bibr" rid="ref25">Crisinel et al., 2013</xref>; <xref ref-type="bibr" rid="ref112">Spence, 2021a</xref>, <xref ref-type="bibr" rid="ref113">2021b</xref>). 
Such joint dissemination could overlook the need to match the perception of sensory modalities (e.g., perceived pleasantness) to create congruent multisensory experiences. Yet the phenomenon of correspondence between sensory modalities, also referred to as cross-modal correspondence, occupies a considerable place in the study of the influences of multisensory environments on human behavior, emotion, and cognition.</p>
<sec id="sec2">
<label>1.1</label>
<title>Cross-modal correspondences in multisensory environments</title>
<p>Cross-modal correspondences may be semantic, statistical, structural, or affective in nature (<xref ref-type="bibr" rid="ref75">Motoki et al., 2023</xref>; <xref ref-type="bibr" rid="ref109">Spence, 2020a</xref>). Semantic and statistical correspondences are typically acquired through learning, either via language development or repeated exposure. In contrast, structural correspondences may be innate, emerging from the maturation of shared neural connections. For instance, the perceived familiarity of both odors and music has been linked to overlapping patterns of neural connectivity (<xref ref-type="bibr" rid="ref88">Plailly et al., 2007</xref>), suggesting a shared multisensory neural network underlying these perceptual processes. Affective correspondences, on the other hand, arise from affective characteristics common to the sensory stimuli (e.g., perceived familiarity, intensity, arousal, or pleasantness of sensory stimuli). According to <xref ref-type="bibr" rid="ref75">Motoki et al. (2023)</xref>, these categories are not mutually exclusive and may often co-occur. In this regard, to elicit a &#x201C;congruent effect&#x201D; observable on behavioral or physiological measures, sensory stimuli should be selected according to their type of correspondence.</p>
<p>Psychological studies examining multisensory experiences often explore the effects of matched versus mismatched stimuli on affective perception (e.g., perceived pleasantness, familiarity, intensity, etc.), or on behavior, emotions, and cognition. Regarding the mutual influences of sensory stimuli, <xref ref-type="bibr" rid="ref101">Seo and Hummel (2011)</xref> demonstrated a congruence effect between sounds and odors, likely grounded in statistical and affective correspondence. In their study, pairing a congruent sound (e.g., the crunch of potato chips) with the corresponding odor enhanced the perceived pleasantness of the odor compared to an incongruent pairing (e.g., the sound of coffee with the odor of chips). In a subsequent experiment, scientists demonstrated the existence of a halo effect, whereby the pleasantness of the sound influenced that of the odor, but not vice versa. A few years later, <xref ref-type="bibr" rid="ref102">Seo et al. (2014)</xref> confirmed the role of olfactory-auditory correspondences in perceived pleasantness through three experiments, showing that congruent sounds not only enhanced odor pleasantness but also increased familiarity and identification. In another study conducted by <xref ref-type="bibr" rid="ref124">Velasco et al. (2014)</xref>, participants were asked to rate the pleasantness, intensity, and quality of six odors (blueberry, lemon, and orange considered pleasant, musk, dark chocolate, and smoke considered unpleasant) after listening to pleasant consonant music, unpleasant dissonant music, and white noise. While music did not directly influence olfactory perception, prior exposure to white noise reduced perceived pleasantness, sweetness, and moisture across all odors compared to the music conditions. In terms of arousal, olfactory modulation of musically induced arousal has also been observed in cases of affective correspondence (<xref ref-type="bibr" rid="ref133">Zhou and Yamanaka, 2018</xref>).</p>
<p>Uni- and multisensory stimulations are also investigated for their use in psychological interventions aimed at supporting preserved behavioral, emotional, and cognitive functions. Music, for example, is well known for its benefits over anxiety and depression disorders (<xref ref-type="bibr" rid="ref1">Aalbers et al., 2017</xref>; <xref ref-type="bibr" rid="ref38">Guti&#x00E9;rrez and Camarena, 2015</xref>), as well as in regulating emotions depending on both musical and individual characteristics (<xref ref-type="bibr" rid="ref72">Moore, 2013</xref>). Similarly, multisensory environments have also been employed for elderly individuals with neurodegenerative diseases (<xref ref-type="bibr" rid="ref27">De Oliveira et al., 2014</xref>). In advanced stages of such pathologies, where verbal communication is severely impaired, these interventions can be adapted to focus on aiding nonverbal interaction, emotional regulation, and physiological stabilization (<xref ref-type="bibr" rid="ref2">Ansaldo et al., 2018</xref>; <xref ref-type="bibr" rid="ref22">Clare et al., 2020</xref>; <xref ref-type="bibr" rid="ref65">Maseda et al., 2018</xref>). However, the mechanism through which multisensory experiences can help to maintain or enhance behavior and cognition remains under debate. One promising research direction focuses on the type of cross-modal correspondences involved. For example, <xref ref-type="bibr" rid="ref3">Baccarani et al. (2023)</xref> study represented a significant advance in the understanding of the influence of multisensory olfactory-auditory environments on physiological recovery following cognitive stress. Following cognitive stress induced by a battery of cognitive tasks, participants were assigned to unisensory (either slow-tempo classical music or lavender essential oil diffusion), multisensory (both sensory stimuli diffused together), or neutral (neither music nor odor diffused) environments. 
The results highlight the effectiveness of unisensory musical and olfactory environments on some of the physiological variables measured, compared with the neutral condition. While a multisensory gain could have been expected in the multisensory condition thanks to the structural matching of stimuli hypothesized by Baccarani et al., no beneficial effect was observed on physiological measures. Although declarative data (i.e., how relaxed participants felt) could be interesting to investigate in further studies, the results could suggest that semantic or structural correspondence may not be the most relevant match. The question then arises of an affective correspondence based on the personal experience and preferences of the participants.</p>
<p>Cross-modal correspondence represents one of many factors that can influence behavior, emotion, and cognition. Understanding the broader impact of multisensory environments remains a complex and challenging task. A starting point could be the theoretical cognitive models, which offer useful frameworks for conceptualizing how perceived sensory input shapes cognitive processes.</p>
</sec>
<sec id="sec3">
<label>1.2</label>
<title>Human cognition models to help the understanding of uni and multisensory environments influences</title>
<p>Two recent articles have highlighted how a sensory stimulus (i.e., music in these articles) can influence cognitive performance by drawing on various cognitive models (<xref ref-type="bibr" rid="ref34">Goltz and Sadakata, 2021</xref>; <xref ref-type="bibr" rid="ref35">Gonzalez and Aiello, 2019</xref>). Before presenting these cognitive models, it is important to note that such influences can be broadly explained by the interaction of three categories of factors: the characteristics of the individuals involved, the characteristics of the task being performed, and the characteristics of the sensory stimulus.</p>
<p>Regarding individual characteristics, the Cognitive Capacity Hypothesis (<xref ref-type="bibr" rid="ref52">Kahneman, 1973</xref>) provides insight into how a person may inhibit environmental information. According to this model, cognitive resources are limited and fluctuate based on the individual&#x2019;s arousal level. When arousal is moderately high as opposed to moderately low, their ability to inhibit distractors is enhanced, resulting in greater cognitive capacity for task performance. Conversely, a too low arousal level may prevent individuals from processing relevant environmental information, thus interfering with task completion or behavioral adjustment. This model emphasizes the risk of cognitive overload when a distractor draws on the same cognitive resources required for the activity or the task. In other words, cognitive capacity is modulated by environmental interactions, which in turn influence performance.</p>
<p>Complementary to the Cognitive Capacity Hypothesis, and intersecting individual and task characteristics, the Load Theory (<xref ref-type="bibr" rid="ref58">Lavie, 2005</xref>, <xref ref-type="bibr" rid="ref59">2010</xref>) proposes that the impact of a distractor on task performance depends on the type of load, perceptual or cognitive, demanded by the task. When a task involves a high perceptual load, distractors could be easily blocked out since no perceptual resources would be available and orientable towards the distractors. In this case, the sheer number of perceptual events saturates the individual&#x2019;s processing capacity. However, in a task requiring high cognitive load, the current task could be interrupted by an irrelevant event similar to the activity being performed. For example, an activity that would require high visual attention capacities could be interrupted by a distractor relying on the same capacities. This model also includes the influence of aging on the capacities to inhibit distractors: a reduced perceptual capacity would attenuate sensitivity to irrelevant events, especially for tasks with low perceptual load. In other words, older adults would easily inhibit distractors during a low perceptual task due to impaired perception, compared to younger adults. This developmental perspective, which is necessary to adapt tools and spaces to people of different ages, can be found in other models or scales like the dynamic Neurocognitive Adaptation (<xref ref-type="bibr" rid="ref20">Cieri et al., 2025</xref>; <xref ref-type="bibr" rid="ref21">Cieri et al., 2025</xref>), which offers an interesting lifespan framework.</p>
<p>While Load Theory describes how distractions may impair performance, the Distraction-conflict Theory (<xref ref-type="bibr" rid="ref7">Baron, 1986</xref>) offers a contrasting view, suggesting that distractors could exert a positive influence on cognitive performance. According to this theory, during simple or repetitive tasks with low cognitive demand, individuals could be less inclined to switch to a kind of mind-wandering thanks to a distractor (<xref ref-type="bibr" rid="ref34">Goltz and Sadakata, 2021</xref>; <xref ref-type="bibr" rid="ref35">Gonzalez and Aiello, 2019</xref>). Although mind-wandering has been shown to benefit important cognitive processes, such as episodic memory, empirical evidence suggests that it disrupts behavioral responses to immediate sensory external input, favoring intrinsic, self-generated thoughts (<xref ref-type="bibr" rid="ref6">Baird et al., 2014</xref>; <xref ref-type="bibr" rid="ref90">Poerio et al., 2017</xref>). In the case of mind-wandering during task completion, the distractor would create an attentional conflict, increasing both arousal and task-related cognitive load. People would therefore be more cognitively engaged, and the cognitive capacities for carrying out the task with the addition of the distractor would adjust to the cognitive load required for the completion of the current activity. Following this logic, in the case of a task requiring a very high cognitive load, the attentional conflict created by the distractor would add an additional cognitive load. Individuals&#x2019; cognitive abilities would be overwhelmed, making task completion impossible. In short, distractions would create attentional conflicts that would increase activation and, depending on whether the task is simple or complex, would either support or hinder its completion.</p>
<p>Together, these three models frame environmental sensory stimuli as either distracting or facilitating influences on cognition and behavior, depending on factors such as arousal level and task complexity. Referring to the Cognitive Capacity Hypothesis, <xref ref-type="bibr" rid="ref34">Goltz and Sadakata (2021)</xref> explored how listening to background music affects cognitive task performance. They argued that music may interfere with the cognitive processes required for task execution, especially when it contains lyrics in a familiar language, drawing on the same cognitive systems used for reading or vocabulary learning. In such cases, music would overload the information processing capacity (e.g., via saturation of the phonological loop, see <xref ref-type="bibr" rid="ref4">Baddeley, 1996</xref>), disrupting the ongoing linguistic task. Musical characteristics such as lyrics, musical complexity, volume, and rhythm all contribute to the interference depending on the nature of the task. Unlike music, other sensory stimuli such as smells, light, or temperature do not typically compete for the same cognitive processes. Therefore, it remains difficult to argue that these stimuli interfere with tasks via similar mechanisms.</p>
<p>However, according to the Cognitive Capacity Hypothesis, available cognitive capacities are modulated by arousal. Thus, other kinds of sensory stimuli could therefore indirectly influence the available capacities by modifying arousal. Odor perception, for example, is strongly linked to affective and emotional processing (<xref ref-type="bibr" rid="ref11">Bensafi et al., 2002</xref>; <xref ref-type="bibr" rid="ref43">Herz, 2002</xref>; <xref ref-type="bibr" rid="ref55">Kontaris et al., 2020</xref>; <xref ref-type="bibr" rid="ref121">Toet et al., 2020</xref>), and could impact cognitive capacity via changes in arousal. The hypothesis is inspired by the Mood and Arousal Hypothesis (<xref ref-type="bibr" rid="ref49">Husain et al., 2002</xref>; <xref ref-type="bibr" rid="ref120">Thompson et al., 2001</xref>), originally developed in music research but here extended to other sensory stimuli. According to this hypothesis, listening to music perceived as pleasant enhances positive mood and arousal, enhancing short-term cognitive performance. However, too pleasant music excerpts may over-activate and hamper cognitive performance, while listening to unpleasant music would have a negative impact on mood and arousal, impairing cognitive performance. Though promising, the Mood and Arousal Hypothesis has been tested in other sensory experiences, and empirical studies showed more nuanced results regarding the relationship between mood and arousal. 
Notably, studies have shown that cognitive performance can improve even in the absence of mood elevation from odor perception (<xref ref-type="bibr" rid="ref74">Moss et al., 2008</xref>), that trigeminal stimulation induced by certain odorant molecules could also influence cognitive performance (<xref ref-type="bibr" rid="ref61">Lombion et al., 2009</xref>), and that unpleasant odors can enhance cognitive performance and physiological arousal, including heart rate and skin conductance (<xref ref-type="bibr" rid="ref11">Bensafi et al., 2002</xref>; <xref ref-type="bibr" rid="ref12">Boesveldt et al., 2010</xref>; <xref ref-type="bibr" rid="ref14">Brauchli et al., 1995</xref>). Such findings remind us that the relationship between mood, arousal, and sensory perceptions cannot be synthesized only by one factor, such as pleasantness.</p>
<p>Similarly, applying the Distraction-Conflict Theory to other sensory modalities seems challenging. For example, <xref ref-type="bibr" rid="ref46">Ho and Spence (2005)</xref> demonstrated that peppermint odor obtained through a synthetic compound did not enhance vigilance in a simple sequential task. Instead, it improved concentration in a more complex dual-task condition. According to the authors, an increase in vigilance would have reduced reaction time without improving accuracy. However, accuracy improved only in the dual-task setting. This suggests the peppermint odor facilitated response inhibition in a complex multisensory context. It is important to note that during the dual task, the participant&#x2019;s response was dependent on processing multisensory information, not just uni-sensory input. Thus, according to Ho and Spence, peppermint odorant improved the accuracy of participants&#x2019; responses by having a positive effect on the ability to inhibit dominant responses during a complex task requiring the processing of multisensory information.</p>
<p>To summarize, current cognitive models trying to represent the influences of sensory stimuli on cognition and behavior remain predominantly unimodal and incomplete. Although the effects of multisensory environments, such as olfactory-auditory ones, are still a matter of scientific debate, their use in multisensory designs is growing (<xref ref-type="bibr" rid="ref25">Crisinel et al., 2013</xref>; <xref ref-type="bibr" rid="ref96">Rey et al., 2023</xref>; <xref ref-type="bibr" rid="ref111">Spence, 2020c</xref>, <xref ref-type="bibr" rid="ref9001">2020d</xref>; <xref ref-type="bibr" rid="ref125">Velasco and Obrist, 2020</xref>; <xref ref-type="bibr" rid="ref114">Spence, 2022</xref>). Meanwhile, through the development and updating of cognitive models, it appears that the search for personalizing both uni- and multisensory experiences has been surprisingly neglected.</p>
</sec>
<sec id="sec4">
<label>1.3</label>
<title>Personalization of sensory experiences and the implication of artificial intelligence (AI)</title>
<p>While traveling, cooking, walking, or engaging in sports, music accompanies us in numerous settings and supports diverse activities (<xref ref-type="bibr" rid="ref28">Dibben and Williamson, 2007</xref>; <xref ref-type="bibr" rid="ref95">Rentfrow, 2012</xref>). The simplified content dissemination and access to online and interactive music streaming services can partly explain the almost ubiquitous presence of music in our daily lives (<xref ref-type="bibr" rid="ref69">Mazziotti and Ranaivoson, 2024</xref>). These platforms, operated by multinational corporations, curate information flow to optimize user engagement and profitability (<xref ref-type="bibr" rid="ref89">Poell et al., 2019</xref>; <xref ref-type="bibr" rid="ref123">van Dijck et al., 2019</xref>; <xref ref-type="bibr" rid="ref128">Webster, 2023</xref>). In this manner, digital technologies supporting music streaming services have reinforced the notion of personalization, enabled by the fusion of social media, streaming services, causal inference, machine learning, and AI (<xref ref-type="bibr" rid="ref69">Mazziotti and Ranaivoson, 2024</xref>). By suggesting music recommendations based on the ones previously listened to by the consumer, digital platforms extract and predict similarities in music taste. At a large scale, these processes are not without environmental impacts and social consequences, leading to polarization and exacerbation of class stratification (<xref ref-type="bibr" rid="ref128">Webster, 2023</xref>; <xref ref-type="bibr" rid="ref134">Zhuk, 2023</xref>).</p>
<p>When focusing on the consumer musical experience, the search for the song that will bring the most pleasure, that will help maintain a state of concentration, and physical performance are common behaviors today: out of 43,000 people surveyed by the International Federation of the Phonographic Industry (<xref ref-type="bibr" rid="ref119">The International Federation of the Phonographic Industry, 2023</xref>), 63% of them claimed to search for specific songs in the last month, and 59% of them used personalized music lists. This trend has spurred scientific inquiry into the cognitive and behavioral effects of personalized background music. In two recent studies, <xref ref-type="bibr" rid="ref54">Kiss et al. (2024)</xref> and <xref ref-type="bibr" rid="ref53">Kiss and Linnell (2021)</xref> demonstrated that preferred background music (1) increases task-focused states while reducing mind-wandering ones, and (2) modulates these states (i.e., task-focusing and mind-wandering) through arousal modulation: in a context of background listening increasing arousal, the results suggested that mind-wandering states decreased, whereas task-focusing states increased. These findings highlight the influence of preferred and personalized sensory stimulation on cognition and behavior via individuals&#x2019; arousal and open the possibility of investigating such influences with other kinds of uni- or multisensory experiences.</p>
<p>Like music or light, other kinds of sensory stimuli can be broadcast with a click on our smartphones. Although it is still considered a curiosity or even ignored by most of the population, the number of studies investigating the possibility of broadcasting smells with small technological devices, like smartphones, is growing (<xref ref-type="bibr" rid="ref47">Huang and Chen, 2023</xref>; <xref ref-type="bibr" rid="ref62">Maggioni et al., 2018</xref>, <xref ref-type="bibr" rid="ref63">2019</xref>; <xref ref-type="bibr" rid="ref66">Matsukura et al., 2013</xref>). Within the next few years, we will likely be able to enhance the mastering of our olfactory environment. However, the underestimated importance of the sense of smell in contemporary Western societies may delay its development (<xref ref-type="bibr" rid="ref45">Herz and Bajec, 2022</xref>; <xref ref-type="bibr" rid="ref99">Schifferstein, 2006</xref>; <xref ref-type="bibr" rid="ref131">Wrzesniewski, 1999</xref>), although olfaction is a sense that develops early in humans, supporting the development of vision (<xref ref-type="bibr" rid="ref9002">Rekow and Leleu, 2023</xref>; <xref ref-type="bibr" rid="ref9003">Schaal et al., 2020</xref>). A question might arise while reading the last sentences: what is the link between personalization of our sensory environment and the development of olfactory research? 
Beyond being an important cultural and identity marker (<xref ref-type="bibr" rid="ref13">Boswell, 2008</xref>; <xref ref-type="bibr" rid="ref64">Majid, 2015</xref>), smells participate in our social life and provide important health indicators (<xref ref-type="bibr" rid="ref100">Schwambergov&#x00E1; et al., 2024</xref>), influence our emotional state (<xref ref-type="bibr" rid="ref127">Villemure et al., 2003</xref>), and are at the center of sensory strategies to try to influence consumer behavior (<xref ref-type="bibr" rid="ref29">Douc&#x00E9; and Janssens, 2013</xref>; <xref ref-type="bibr" rid="ref78">Nibbe and Orth, 2017</xref>; <xref ref-type="bibr" rid="ref108">Spence, 2015</xref>; <xref ref-type="bibr" rid="ref118">Teller and Dennis, 2012</xref>; <xref ref-type="bibr" rid="ref132">Yang and Cai, 2024</xref>). Odors bring personal information, and their cognitive processing is intrinsically linked to our autobiographical memory (<xref ref-type="bibr" rid="ref39">Hackl&#x00E4;nder et al., 2019</xref>; <xref ref-type="bibr" rid="ref96">Rey et al., 2023</xref>; <xref ref-type="bibr" rid="ref129">Willander and Larsson, 2006</xref>). The literary anecdote of Proust&#x2019;s madeleine illustrates an emotional and memory phenomenon that everyone can experience: a smell can trigger the revival of an emotion linked to a particular autobiographical memory. Numerous studies support that odor-evoked emotion can be particularly intense and that the memory recovered by olfactory stimulation could be linked to old moments in life, sometimes belonging to the first 10&#x202F;years of life (<xref ref-type="bibr" rid="ref19">Chu, 2000</xref>; <xref ref-type="bibr" rid="ref44">Herz, 2016</xref>; <xref ref-type="bibr" rid="ref50">Jellinek, 2004</xref>; <xref ref-type="bibr" rid="ref57">Larsson et al., 2006</xref>; <xref ref-type="bibr" rid="ref122">Toffolo et al., 2012</xref>; <xref ref-type="bibr" rid="ref130">Willander and Larsson, 2007</xref>). 
For all these reasons, the development of smell diffusion cannot neglect the concept of personalization and cannot only focus on common affective characteristics such as pleasantness, familiarity, or irritability.</p>
<p>The global influence of personalized sensory stimulation is still poorly understood in psychology, possibly due to the difficulty of adapting scientific protocols, but also due to the absence of a consensus regarding a method to personalize sensory stimuli. For example, studies exploring the effects of participants&#x2019; musical preferences have selected preferred music excerpts based on choices that were constrained: from a sample of musical excerpts of varying lengths, the music that induces the most pleasure or displeasure can be considered as preferred and personalized (<xref ref-type="bibr" rid="ref48">Huang and Shih, 2011</xref>; <xref ref-type="bibr" rid="ref51">Johansson et al., 2012</xref>; <xref ref-type="bibr" rid="ref77">Nemati et al., 2019</xref>; <xref ref-type="bibr" rid="ref86">Perham and Sykora, 2012</xref>). Reconsideration of musical personalization is recent and uncommon in this field of research, with participants being asked to bring their favorite CD or playlist and listen to it while performing a cognitive task (<xref ref-type="bibr" rid="ref26">Darrow et al., 2006</xref>; <xref ref-type="bibr" rid="ref73">Mori et al., 2014</xref>). Those studies suggest that listening to favorite, personalized music has a positive influence on certain cognitive performances, particularly attention by modifying the arousal state of the participant (<xref ref-type="bibr" rid="ref26">Darrow et al., 2006</xref>; <xref ref-type="bibr" rid="ref53">Kiss and Linnell, 2021</xref>; <xref ref-type="bibr" rid="ref73">Mori et al., 2014</xref>). In other studies, the positive effect of preferred and personalized music on cognitive performance is based on factors related to the cultural background of the participants (<xref ref-type="bibr" rid="ref56">Kotsopoulou and Hallam, 2010</xref>; <xref ref-type="bibr" rid="ref71">Mohan and Thomas, 2020</xref>). 
The recent and growing interest in the use of personalized sensory stimuli for therapeutic applications (<xref ref-type="bibr" rid="ref36">Grifoni et al., 2023</xref>), notably regarding Virtual Reality (<xref ref-type="bibr" rid="ref60">Lee et al., 2024</xref>; <xref ref-type="bibr" rid="ref82">Pardini et al., 2022</xref>; <xref ref-type="bibr" rid="ref87">Pizzoli et al., 2022</xref>; <xref ref-type="bibr" rid="ref107">Solc&#x00E0; et al., 2021</xref>), has led researchers to investigate the importance and influences of sensory stimuli personalization. Choosing a preferred sensory stimulus over a panel or bringing one into laboratory experiments (e.g., a music excerpt) involves its evaluation and comparison. Scientists investigate the importance of people&#x2019;s metacognitive judgments on evaluations and choices regarding musical background during working, learning, or study conditions. These studies are complementary to research on individual preferences, firstly because metacognitive judgments characterize the perceived propensity of a stimulus, such as music, to help or distract a person in performing a task. Secondly, by studying the incidence of retrospective metacognitive judgments, a positive correlational link has been demonstrated between the perceived pleasantness of music and the likelihood of judging it as improving cognitive performance (<xref ref-type="bibr" rid="ref10">Bell et al., 2023b</xref>).</p>
<p>In the <xref ref-type="bibr" rid="ref9">Bell et al. (2023a)</xref> study, participants&#x2019; objective cognitive performance in serial recall tasks was impacted by the presence of music and whether the music was liked. At the same time, <xref ref-type="bibr" rid="ref9">Bell et al. (2023a)</xref> investigated the validity of metacognitive judgments about the effects of irrelevant auditory stimuli (piano melodies and Mozart&#x2019;s sonata) on cognitive task performance. The authors aimed to confirm one of two theories: (1) the direct-access account, according to which people base their metacognitive judgments on direct and conscious access to the distracting or helpful features of an auditory stimulus, and (2) the processing-fluency account, according to which people base these same metacognitive judgments on similar past experiences, whether conclusive or not. According to these two theories, the repetition of irrelevant auditory stimuli would lead to increasingly less negative metacognitive judgments. Yet, unlike the direct access explanation, fluent processing theory does not require knowledge of the precise helping or distracting characteristics of a stimulus, so an individual can make metacognitive judgments that conflict with the objectively and scientifically provable effects of a stimulus on their abilities. Through two experiments, the scientists manipulated the processing fluency of musical excerpts while maintaining their musical complexity. The scientists assumed that people are familiar and accustomed to the sound of music played in the normal direction of listening (i.e., forward), rather than in reverse (i.e., backward). 
The authors aimed to create an illusion of fluidity in the processing of musical stimuli and hypothesized that participants would have the illusion of performing better on a cognitive task with music played &#x201C;forward&#x201D; despite its complexity compared to the same music played &#x201C;backward.&#x201D; In other words, the scientists expected participants to wrongly judge music played &#x201C;forward&#x201D; as less distracting than music played &#x201C;backward.&#x201D; <xref ref-type="bibr" rid="ref9">Bell et al. (2023a)</xref> demonstrate that direct experience of performing a task with music helped attenuate the illusion of metacognitive judgment without eliminating it. In other words, participants modified their judgment about the distracting nature of the sensory stimuli (i.e., the background music broadcasted during the cognitive task) after the experience. Besides, participants judged the same music &#x201C;backward&#x201D; as being more distracting than in a normal listening mode. Finally, these results showed that human metacognitive judgments about the suitability of music to aid cognitive task performance are based on fluid processing.</p>
<p>This discovery highlights the potential for poor metacognitive judgments about environmental conditions that would promote good cognitive performance. In other words, we should be cautious about how a sensory stimulus, perceived as pleasant and potentially chosen among others based on metacognitive judgments, has a genuinely positive influence on our cognition and behavior. However, the lack of knowledge about the influences of personalized sensory stimuli on cognition, emotion, and behavior does not detract from the growing interest and use of AI to personalize experiences. Technological advances, such as computing power, machine learning, or data storage are combined with theoretical developments, helping researchers transform theoretical concepts into applications. The personalization of communication content and user experience is not only &#x201C;based on individuals&#x2019; preferences, interests, demographics, and past behavior, item features and characteristics, or similar tastes of others, but also on psychological factors&#x2014;the method of psychological targeting&#x201D; (<xref ref-type="bibr" rid="ref42">Hermann, 2022</xref>, p. 5). Individual psychological traits, such as personality traits and emotional states, are also computationally predicted by algorithms thanks to their purchase history and digital footprint (<xref ref-type="bibr" rid="ref67">Matz et al., 2017</xref>; <xref ref-type="bibr" rid="ref68">Matz and Netzer, 2017</xref>; <xref ref-type="bibr" rid="ref33">Gao and Liu, 2023</xref>).</p>
<p>Yet, if a growing body of research gathers many fields like marketing, economy, welfare and public policy, computer sciences, and statistics, it appears that psychology could give interesting insights through theoretical models, practical feasibility, and therapeutic applications. Based on the cognitive models presented above and the research focusing on metacognitive judgments, we propose a cognitive model to represent the potential influences of uni- and multisensory stimuli on cognition and behavior.</p>
</sec>
<sec id="sec5">
<label>1.4</label>
<title>Proposal for a cognitive model representing the influences of uni- and multisensory stimuli on cognitive performances</title>
<p>The structure of this model (<xref ref-type="fig" rid="fig1">Figure 1</xref>) is based on the Cognitive Capacity Hypothesis (<xref ref-type="bibr" rid="ref52">Kahneman, 1973</xref>), which posits that task performance relies on an individual&#x2019;s limited and variable available capacity, modulated by their arousal. By integrating Load Theory (<xref ref-type="bibr" rid="ref58">Lavie, 2005</xref>, <xref ref-type="bibr" rid="ref59">2010</xref>), we account for the influence of cognitive and perceptual loads from both tasks and distractors, while also considering task complexity and cognitive aging effects. Further incorporating Distraction-Conflict Theory (<xref ref-type="bibr" rid="ref7">Baron, 1986</xref>; <xref ref-type="bibr" rid="ref35">Gonzalez and Aiello, 2019</xref>) allows for the inclusion of potential beneficial effects of distractors on arousal. Finally, the Strength and Vulnerability Integration model (<xref ref-type="bibr" rid="ref18">Charles, 2010</xref>) addresses age-related differences in arousal regulation. The proposed model comprises several interconnected modules (<xref ref-type="fig" rid="fig2">Figure 2</xref>).</p>
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption>
<p>Synthesized interactions between the different theories and models used to develop the Model of the influence of uni and multisensory environments on cognitive performances.</p>
</caption>
<graphic xlink:href="fcomp-07-1652537-g001.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Flowchart diagram with five interconnected boxes. Top box is red, labeled &#x201C;Strength and Vulnerability Integration model&#x201D; by Charles, 2010, mentioning &#x201C;Age-related differences in arousal regulation.&#x201D; Below it, a teal box titled &#x201C;Cognitive Capacity Hypothesis&#x201D; by Kahneman, 1970, mentioning "Model&#x2019;s structure", "Task performance depends on the individual&#x2019;s limits", "Variable available capacity depending on the arousal state". A purple box at the bottom labeled &#x201C;Metacognitive judgments&#x201D; by Bell and others, 2023, noting two outputs on performance feeling and objectivity. On the right, a white box titled &#x201C;Load Theory&#x201D; by Lavie, 2005, 2010, discussing task loads and task complexity. Connected to it, a green box &#x201C;Distraction-Conflict Theory&#x201D; by Baron, 1986, and Gonzalez &#x0026; Aiello, 2019, on potential beneficial effects of distractor. Arrows between them suggest dependencies.</alt-text>
</graphic>
</fig>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption>
<p>Model of the influence of uni and multisensory environments on cognitive performances. The numbers indicate the reading order.</p>
</caption>
<graphic xlink:href="fcomp-07-1652537-g002.tif" mimetype="image" mime-subtype="tiff">
<alt-text content-type="machine-generated">Flowchart depicting determinants and manifestations of arousal. It includes factors like sensory stimuli, habits, age, and preferences influencing arousal. Key elements are sensory stimuli, capacity allocation, arousal evaluation, and response manifestations. Elements connect through directional arrows, illustrating interactions between capacities, policies, tasks, performances, and environmental stimuli.</alt-text>
</graphic>
</fig>
<sec id="sec6">
<label>1.4.1</label>
<title>Possible activities and tasks</title>
<p>The model begins with the &#x201C;Activities and tasks&#x201D; module (module &#x201C;1&#x201D; in <xref ref-type="fig" rid="fig2">Figure 2</xref>). All activities and tasks are dependent on an input, which is the available capacity. According to the Cognitive Capacity Hypothesis model, an activity carries a cognitive load, a consideration also found in the Load Theory model: a task carries a cognitive load and a perceptual one, just like potential distractors. By linking the two models, a task that would have a high perceptual load could be provided with available capacity, thus saturating the stock of available capacity. Distractors with a perceptual load equivalent to the task could not be processed. In the case of a task with a high cognitive load and low perceptual load, the latter could be interrupted by one or more diverse environmental determinants, which would be endowed with the same type of load or having a greater perceptual load.</p>
<p>The Cognitive Capacity Hypothesis model suggests that successfully performing multiple activities simultaneously would depend on the capacity required to perform each of them separately; the complexity of the activity or task is therefore important to consider. A simple activity or task would be poorly demanding in available capacity and would have a low cognitive load, whereas a difficult activity would be very demanding in cognitive capacity and would have a high cognitive load. If the activity or task exceeds the individual&#x2019;s available capacity, its execution would therefore be hindered. Less available capacity provided would cause a deterioration in performance compared to the standard expected for its completion, and an activity or task that had a capacity demand higher than the individual&#x2019;s cognitive limit would be impossible to undertake. Moreover, the perceived complexity of an activity varies according to an individual&#x2019;s arousal and therefore errors made during the task can provide information regarding the variation in activation state with the difficulty of the activity performed. Unused capacities reduced by the difficulty of the task would be proven by failure to detect a signal that is normally detected easily or a slower response.</p>
<p>Important points:</p>
<list list-type="bullet">
<list-item>
<p>A task with high perceptual load may saturate available capacity, preventing concurrent processing of distractors with similar loads.</p>
</list-item>
<list-item>
<p>A task with high cognitive load but low perceptual load remains vulnerable to interruption by environmental determinants (e.g., distractors) of comparable or greater perceptual load.</p>
</list-item>
<list-item>
<p>Simple tasks require minimal capacity and exhibit low cognitive loads.</p>
</list-item>
<list-item>
<p>Complex tasks demand substantial capacity; exceeding an individual&#x2019;s limits leads to performance failure.</p>
</list-item>
<list-item>
<p>Performance errors (e.g., missed signals or delayed responses) reflect fluctuations in arousal relative to task difficulty.</p>
</list-item>
</list>
</sec>
<sec id="sec7">
<label>1.4.2</label>
<title>Evaluation of demands on capacity and allocation policy</title>
<p>The &#x201C;Evaluation of Capacity Demands&#x201D; (module &#x201C;2.a&#x201D; in <xref ref-type="fig" rid="fig2">Figure 2</xref>) and &#x201C;Allocation Policy&#x201D; modules (module &#x201C;2.b&#x201D; in <xref ref-type="fig" rid="fig2">Figure 2</xref>) are central elements of the Cognitive Capacity Hypothesis model. The function of the evaluation of demands on capacity is to assess the capacity required to achieve an activity or task, based on the arousal and available capacity, which vary together. This module can therefore be considered as a governance system that informs the allocation policy of the available capacity. The evaluation of demands can, however, suffer from an individual&#x2019;s fatigue or agitation, and therefore from an activation state that is too low or too high. The evaluation of demands manages the stock of available capacity and its distribution. Together with the evaluation of demands, the allocation policy recalls the role played by the supervisory system in <xref ref-type="bibr" rid="ref103">Shallice&#x2019;s (1988)</xref> model, which regulates and selects alternative response patterns, and thus allows behavioral adaptation during a non-routine situation. The allocation policy is directly influenced by the evaluation of demands on capacity, but also by enduring dispositions, momentary intentions, and arousal. According to the Cognitive Capacity Hypothesis, the attribution strategy system favors perceptual activities that require a large available capacity to the detriment of less demanding perceptual ones. This can be compared to tasks with a high perceptual load compared to low perceptual load ones.</p>
<p>Important points:</p>
<list list-type="bullet">
<list-item>
<p>Perceptual activities with higher capacity demands are prioritized by the allocation policy over less demanding ones.</p>
</list-item>
<list-item>
<p>Fatigue or hyperarousal can impair the evaluation of capacity demand, disrupting capacity distribution.</p>
</list-item>
</list>
</sec>
<sec id="sec8">
<label>1.4.3</label>
<title>Enduring dispositions and momentary intentions</title>
<p>Two other modules influence the allocation policy. First, the &#x201C;Enduring dispositions&#x201D; (module &#x201C;3.a&#x201D; in <xref ref-type="fig" rid="fig2">Figure 2</xref>), described by Kahneman as involuntary attention. It would allow a transient effort to process and analyze the stimulus that captured attention, but also the inhibition of current activity as well as the attentional orientation of the individual toward future sources of relevant information. Then, the &#x201C;Momentary intentions&#x201D; (module &#x201C;3.b&#x201D; in <xref ref-type="fig" rid="fig2">Figure 2</xref>) refers to a phenomenon of voluntary and active attention, close to selective attention. These two direct the attention paid to stimuli, and therefore to environmental determinants. While many empirical experiments demonstrated that attention can be influenced and redirected in a voluntary or involuntary way by the pop-up of environmental stimuli (<xref ref-type="bibr" rid="ref16">Carreti&#x00E9;, 2014</xref>; <xref ref-type="bibr" rid="ref23">Cloutier et al., 2020</xref>; <xref ref-type="bibr" rid="ref76">Nadon et al., 2021</xref>), Kahneman&#x2019;s model does not indicate a link of influence between attentional modules and environmental determinants. Finally, in our model, individual determinants also provide information on the individual&#x2019;s activation state and modulate attention. The functioning of these two attentional modules therefore varies according to determinants internal and external to the individual, which influences the allocation policy of available capacity.</p>
<p>Important points:</p>
<list list-type="bullet">
<list-item>
<p>Enduring dispositions facilitate transient stimulus processing and inhibit ongoing activities to orient toward salient information.</p>
</list-item>
<list-item>
<p>Momentary intention reflects goal-directed focus, analogous to selective attention.</p>
</list-item>
<list-item>
<p>Both are influenced by environmental and individual determinants.</p>
</list-item>
<list-item>
<p>While Kahneman&#x2019;s original model omitted these linkages, we explicitly integrate them to reflect empirical evidence on attentional modulation by external/internal stimuli.</p>
</list-item>
</list>
</sec>
<sec id="sec9">
<label>1.4.4</label>
<title>Available capacity and arousal</title>
<p>The &#x201C;Available capacity&#x201D; (module &#x201C;4.a&#x201D; in <xref ref-type="fig" rid="fig2">Figure 2</xref>) is central, its mobilization does not depend on the individual&#x2019;s intention but on the &#x201C;Arousal&#x201D; (module &#x201C;4.b&#x201D; in <xref ref-type="fig" rid="fig2">Figure 2</xref>) required to perform an activity, which itself varies depending on miscellaneous environmental determinants. The arousal level varies from a low state (gray area of the gauge) in which distractors cannot be inhibited, which can manifest as a state of mind wandering, to a high state (red area) in which the inhibition of relevant environmental determinants would prevent the adaptation of individuals&#x2019; behaviors, being visible with a state of agitation or perseveration. A moderately high state (green area) allows for the inhibition of distractors and the consideration of relevant information to perform an activity with success. The Strength and Vulnerability Integration model adds nuance by modulating arousal with aging. This model argues that aging negatively impacts physiological flexibility and, consequently, maintaining and returning to a moderate homeostatic state would be more difficult compared to younger adults. Thus, when miscellaneous environmental determinants negatively impact homeostatic balance, the activation state of older adults remains too high for a prolonged period, inducing the inhibition of miscellaneous relevant environmental determinants.</p>
<p>Here, AI could be used to detect the real-time fluctuation of arousal. Several physiological signals governed by the autonomic nervous system can be monitored, and peripheral sensors such as electrocardiogram, photoplethysmography, pupillometry, electrodermal activity, skin temperature, respiratory cycle, or electromyogram can be used individually or in combination (<xref ref-type="bibr" rid="ref81">Paniagua-G&#x00F3;mez and Fernandez-Carmona, 2025</xref>; <xref ref-type="bibr" rid="ref84">Pelagatti et al., 2025</xref>). These peripheral sensors are suitable for continuous and unobtrusive monitoring (e.g., electrodes, eye tracking, patches, and wristbands), but the collected data could be, at the same time, confounded by behavioral factors like posture or physical activity. To grasp the multidimension of arousal, some behavioral indicators like facial expressions, postures, physical activity, computer interaction patterns, or voice characteristics could be combined with physiological signals (<xref ref-type="bibr" rid="ref94">Reid et al., 2025</xref>). Personalization of these physiological and behavioral measures could be done using user-specific calibration to obtain personalized thresholds, person-specific models trained separately for each individual, and adaptive, dynamic models able to adjust to individual baseline physiological levels over time (e.g., circadian rhythms, changes in health status, etc.) (<xref ref-type="bibr" rid="ref81">Paniagua-G&#x00F3;mez and Fernandez-Carmona, 2025</xref>). However, if AI could help to monitor multiple behavioral and physiological signals, it is essential to have total transparency of all the procedures, and to ensure the protection of the collected data. In this manner, models must have the possibility to unlearn from one individual&#x2019;s data, who would use his or her right to be forgotten (<xref ref-type="bibr" rid="ref81">Paniagua-G&#x00F3;mez and Fernandez-Carmona, 2025</xref>).</p>
<p>Important points:</p>
<list list-type="bullet">
<list-item>
<p>Available capacity is non-volitionally mobilized by arousal, which fluctuates with environmental and individual determinants, and prior activities.</p>
</list-item>
<list-item>
<p>Arousal states can be low (poor distractor inhibition, manifesting as mind-wandering), moderately high (optimal distractor inhibition and task-relevant information processing), or high (inhibition of relevant inputs, causing agitation or perseveration).</p>
</list-item>
<list-item>
<p>The Strength and Vulnerability Integration model adds nuance: older adults exhibit reduced physiological flexibility, prolonging high arousal states and impairing homeostasis.</p>
</list-item>
</list>
</sec>
<sec id="sec10">
<label>1.4.5</label>
<title>Miscellaneous environmental and individual determinants</title>
<p>Available capacity is dependent on the individual&#x2019;s arousal and cannot be modulated voluntarily. Just like ongoing activities and tasks, the &#x201C;Miscellaneous environmental determinants&#x201D; (module &#x201C;5&#x201D; in <xref ref-type="fig" rid="fig2">Figure 2</xref>) and the &#x201C;Miscellaneous individual determinants&#x201D; (module &#x201C;6&#x201D; in <xref ref-type="fig" rid="fig2">Figure 2</xref>) modulate arousal states. When arousal is low, environmental and individual determinants are not blocked. In this condition, mental wandering induced by fatigue or by a simple task can hinder performance with a moderate or low perceptual or cognitive load. Conversely, when the arousal is high, the inhibition of potentially relevant environmental and individual determinants is not processed. Finally, a moderately high arousal state allows the inhibition of distractors and the consideration of relevant information. The Cognitive Capacity Hypothesis highlights the potential overload of cognitive capacity when miscellaneous determinants rely on the same capacities as the activity or task. Thus, the evaluation of demands on capacity and the allocation policy can direct available capacity toward the attentional processing of miscellaneous determinants, an individual&#x2019;s sensory environment can therefore influence its cognitive performances. Kahneman provided very few details about diverse environmental determinants, giving few examples like the intensity of stimulation, the physiological effects of drugs, or training. This module could group an infinite number of stimuli, including sensory stimuli potentially considered as distractors. By gathering the Load Theory to it, the perceptual and cognitive loads of each distractor would influence individuals&#x2019; available capacities, capacities that would then be allocated for the performance of activities and tasks according to their cognitive and perceptual demands.</p>
<p>The Load Theory does not allow us to consider a positive influence of music, smells, or other sensory stimulations, which are then considered as distractors, irrelevant information with perceptual and cognitive loads that can compete with those of the activity. However, in the Distraction-Conflict Theory, it is possible to propose an alternative. During a simple task, distractors with a low cognitive load could create a slight attentional conflict, increasing and maintaining the arousal moderately high. This conflict would prevent the arousal from decreasing too low, which could result in a state of mind wandering. Consequently, a distractor would adjust the arousal state to the cognitive load required by the task to be performed. According to the Distraction-Conflict Theory, performing a complex task with high cognitive load could not be aided by a distractor, because the cognitive loads of the task and the distractor would compete, creating an attentional conflict that would excessively increase the activation state. The Distraction-Conflict Theory, revised by <xref ref-type="bibr" rid="ref35">Gonzalez and Aiello (2019)</xref>, models the influence of music with a low or high cognitive load but does not propose the existence of a perceptual load. According to the Load Theory, all distractors carry a cognitive and perceptual load. In the case of music, it is possible to hypothesize that an increase in a perceptual load (e.g., volume, rhythm) would consequently induce an increase in cognitive load. In the case of odors or other sensory stimuli, it is difficult to grant with certainty the existence of a cognitive load. While perceptual load may be related to its intensity, the cognitive load of a sensory stimulus, such as a smell, may vary depending on an individual&#x2019;s experience. Therefore, it seems necessary to assume that affective load, in addition to perceptual and cognitive load, may also influence individuals&#x2019; arousal.</p>
<p>Our model postulates that environmental determinants include sensory stimuli. Depending on whether they are perceived as favorable or unfavorable for cognitive performance, whether they are uni- or multi-sensory, personalized or imposed, with cross-modal correspondences or not, we assume that sensory stimuli do not influence in the same way the performances felt by individuals and the performances achieved by them. Thus, each sensory stimulus would have, on the one hand, perceptual and cognitive loads dependent on characteristics specific to them, and on the other hand, an affective load dependent on the miscellaneous individual determinants of each person perceiving the stimuli. The presence of an output dedicated to performance feelings, therefore, marks the difference between performance feelings and real performances, which could be differently affected by individual characteristics (miscellaneous individual determinants), the activity or task to achieve, and the stimuli (miscellaneous environmental determinants). This proposal is supported by the studies that investigated metacognitive judgments (<xref ref-type="bibr" rid="ref9">Bell et al., 2023a</xref>; <xref ref-type="bibr" rid="ref10">Bell et al., 2023b</xref>), which assume a difference between individuals&#x2019; metacognitive judgments regarding the influence of sensory stimuli on their cognitive performance and objectively achieved performance.</p>
<p>Finally, as <xref ref-type="bibr" rid="ref34">Goltz and Sadakata (2021)</xref> observed, a greater frequency of musical listening during cognitive activities is a habit found significantly more often among younger adults compared to older adults. Age influences the habits of use and modification of sensory environments, which could modulate the arousal and the modules of enduring dispositions and momentary intentions when carrying out a cognitive task. It therefore seems important that the model we propose in this article includes a dimension of miscellaneous individual determinants that brings together daily habits, age, or even sensory preferences.</p>
<p>Important points:</p>
<list list-type="bullet">
<list-item>
<p>Miscellaneous environmental and individual determinants interact with arousal.</p>
</list-item>
<list-item>
<p>Miscellaneous environmental determinants (e.g., sensory stimuli) compete for capacity based on perceptual and cognitive loads. Kahneman&#x2019;s sparse examples (e.g., drug effects) are expanded here to include multisensory distractors.</p>
</list-item>
<list-item>
<p>Miscellaneous individual determinants (e.g., age, habits) modulate arousal thresholds and attentional biases (e.g., younger adults more frequently use music during tasks).</p>
</list-item>
<list-item>
<p>The Load Theory posits that all distractors have perceptual and cognitive loads, but affective loads (e.g., odor valence) may also shape arousal. Crucially, a feeling of performance (subjective) may diverge from actual performance (objective) due to metacognitive biases.</p>
</list-item>
</list>
</sec>
<sec id="sec11">
<label>1.4.6</label>
<title>Miscellaneous manifestations of arousal</title>
<p>The miscellaneous manifestations of arousal (module &#x201C;7&#x201D; in <xref ref-type="fig" rid="fig2">Figure 2</xref>) are addressed by <xref ref-type="bibr" rid="ref52">Kahneman (1973)</xref> only in the form of illustrations. No textual details are provided, except for a list of possible observations of the disruption of the homeostatic state of individuals, with examples cited: increased heart rate, pupil dilation, and skin conductance. The use of physiological measures would allow for more objective measurements of changes in the activation state induced by the diffusion of sensory stimuli. The goal would be a better understanding of the influence of these changes on cognitive performance, notably depending on age. However, these manifestations of arousal should not be limited to the measurement of physiological constants. Through this model, we propose to extend these manifestations to the affective and emotional states experienced by individuals, feeding a bidirectional relationship with the feelings of performance. In this manner, the miscellaneous manifestations of arousal would directly inform people&#x2019;s feelings of performance, and the latter could have a retroactive action of regulating emotions (<xref ref-type="bibr" rid="ref30">Egloff et al., 2006</xref>; <xref ref-type="bibr" rid="ref37">Gross, 2002</xref>).</p>
<p>Important points:</p>
<list list-type="bullet">
<list-item>
<p><xref ref-type="bibr" rid="ref52">Kahneman (1973)</xref> briefly cited arousal indicators (e.g., heart rate, pupillometry). Our integrative model extends these to include miscellaneous manifestations of arousal, bidirectionally linked to feelings of performance.</p>
</list-item>
<list-item>
<p>Physiological metrics could provide objective arousal measures to disentangle sensory influences across age groups.</p>
</list-item>
</list>
</sec>
</sec>
</sec>
<sec sec-type="discussion" id="sec12">
<label>2</label>
<title>Discussion</title>
<p>We aimed to propose an integrative cognitive model to better understand how the personalization of the sensory environment influences behavior, emotion, and cognition. The integration of the Cognitive Capacity Hypothesis, the Load Theory, the Distraction-Conflict Theory, and the Strength and Vulnerability Integration model provides insights into how individual characteristics, environmental factors, and task demands collectively shape cognitive and behavioral outcomes. However, their explanatory power remains limited when applied to complex multisensory contexts, as opposed to unisensory experiences. While these models can be useful, they rely on a linear and static view of the interactions between cognitive load, arousal, and performance, without finely integrating affective load or the combined effects of multiple sensory modalities. The study by <xref ref-type="bibr" rid="ref3">Baccarani et al. (2023)</xref> provides an illustrative example: although one might expect a &#x201C;multisensory gain&#x201D; from combining relaxant music and scent, two structurally congruent stimuli, no additional benefit was observed compared to unisensory conditions. This outcome challenges the assumption of a simple additive effect of sensory inputs and encourages the adoption of more integrative models, which consider the type of correspondence (affective, structural, or semantic), the arousing properties of the sensory stimuli used, and their effect on the individual&#x2019;s arousal state. Furthermore, the model proposed in this article offers a valuable conceptual contribution: drawing on the Strength and Vulnerability Integration model, it introduces interindividual variability, notably age-related differences, in the ability to modulate arousal. 
This aspect could be crucial, as <xref ref-type="bibr" rid="ref34">Goltz and Sadakata (2021)</xref> have shown that younger adults more frequently engage in active modulation of their sensory environment (e.g., listening to music while working) than older adults. Any model attempting to explain cognitive performance in sensory environments should therefore account for these developmental and contextual differences.</p>
<p>The growing trend toward personalization of uni- and multisensory environments has attracted significant scientific attention, as it emerges as a potential critical factor, capable of modulating an individual&#x2019;s cognitive performances. However, recent findings by <xref ref-type="bibr" rid="ref10">Bell et al. (2023b)</xref> and <xref ref-type="bibr" rid="ref9">Bell et al. (2023a)</xref> offer a nuanced perspective. While participants reported that familiar music improves their performance, these metacognitive judgments do not always align with objectively measured outcomes: the familiarity of a music excerpt creates an illusion of improved performance, whereas objective results demonstrate no benefits compared to music perceived as more distractive. This distinction highlights the methodological need to further link experimental research with the analysis of metacognitive and affective judgments. Doing so would not only enhance our understanding of cognitive adaptation in real-life contexts but also improve the evaluation of personalized sensory strategies. In this sense, individual preferences should not merely be seen as matters of taste, but rather as complex vectors of emotional, motivational, and cognitive regulation. The model proposed in this article integrates this tension between subjective and objective performance by introducing two distinct cognitive outputs: one for objectively assessed performance and another for subjective experience.</p>
<p>Another important concept of this model is the modification of the sensory environment to create an attentional conflict during repetitive or simple cognitive tasks, resulting in increased arousal and the interruption of mind-wandering. In this manner, this attentional conflict prevents the disruption of responses to immediate external inputs by mind-wandering. In this case, mind-wandering is considered to impair behavioral responses and cognitive engagement in the task. However, this phenomenon also has its benefits and should not be fully avoided during activities. Mind-wandering is associated with a large-scale neural network called the Default Mode Network (DMN). The DMN involves regions which are thought to be more devoted to encoding scenes and context, to abstract thinking, memory representation and retrieval, generated experiences, and less to the treatment of input information from the external world (<xref ref-type="bibr" rid="ref15">Buckner and Krienen, 2013</xref>; <xref ref-type="bibr" rid="ref90">Poerio et al., 2017</xref>; <xref ref-type="bibr" rid="ref106">Smith et al., 2018</xref>; <xref ref-type="bibr" rid="ref116">Spreng et al., 2009</xref>). If the use of sensory stimuli could be promising for disrupting mind-wandering, it has also been shown that fast or slow-paced music and evoked emotions modulate the DMN&#x2019;s activity, with potential effects on internally oriented cognition (<xref ref-type="bibr" rid="ref117">Taruffi et al., 2017</xref>). As empirical experiences suggest that DMN activation has potential value for creativity and prospective memory (<xref ref-type="bibr" rid="ref105">Smallwood and Schooler, 2015</xref>), various strategies could be used to minimize the disruptive effects of mind-wandering during tasks without totally avoiding it.</p>
<p>Finally, the development of AI raises questions regarding its role in personalizing experiences, leading researchers from different backgrounds to investigate how AI could contribute to the massification of personalization content, which may yield both beneficial and detrimental consequences. On the one hand, it paves the way for sensory environments optimized to support focus, emotional regulation, or cognitive performance. Personalized approaches hold significant promise for enhancing user experiences, therapeutic interventions, and optimizing the sensory environment. For example, in cases of neurodegenerative diseases where verbal communication is impaired, personalized sensory therapies (e.g., music therapy) could be tailored by leveraging real-time emotional feedback from facial expressions, physiological activation patterns, and other physiological markers (<xref ref-type="bibr" rid="ref80">Panahi, 2025</xref>; <xref ref-type="bibr" rid="ref97">Sakamoto et al., 2013</xref>). Cognition and intellectual growth can also be supported by AI chatbots, which can reinforce executive functioning (i.e., planning, organization, strategy implementation) through personalized and interactive training (<xref ref-type="bibr" rid="ref85">Pergantis et al., 2025</xref>). The integration of AI into these approaches offers the promise of continuous, real-time personalization, not just fixed interventions, thereby improving care. Such applications underscore the potential of AI to bridge gaps in traditional therapeutic practices while respecting ethical boundaries. On the other hand, it also reinforces social distinction, as highlighted by <xref ref-type="bibr" rid="ref128">Webster (2023)</xref>, which may lead to polarization of preferences and a narrowing of sensory experiences. 
Nowadays, through the personalization of communication content and user experience, firms often promote improvements in consumer and social welfare, but at the expense of consumer privacy (e.g., user tracking and behavioral targeting) (<xref ref-type="bibr" rid="ref42">Hermann, 2022</xref>; <xref ref-type="bibr" rid="ref92">Rafieian and Yoganarasimhan, 2023</xref>). The ethical considerations surrounding the AI-enabled personalization of sensory environments are notably addressed to prevent issues related to transparency, autonomy, and privacy, as well as ownership, access, control, and retention of the data collected (<xref ref-type="bibr" rid="ref41">Hao et al., 2025</xref>). For example, in the food industry, scent marketing, or air design, the data collected from AI-enabled personalization should not be used for pervasive and targeted advertising to &#x201C;manipulate consumer behavior by unconsciously raising emotions and consequently manipulating purchase decisions&#x201D; (<xref ref-type="bibr" rid="ref31">Emsenhuber, 2011</xref>, p. 344). We should also be careful about respecting copyright and cultural diversity, as AI is used to create digital representations of fragrances, aided by real analyses of the chemical composition of actual odors (<xref ref-type="bibr" rid="ref41">Hao et al., 2025</xref>; <xref ref-type="bibr" rid="ref104">Sinha et al., 2023</xref>). Overall, a foundational understanding of AI algorithms and their welfare implications must be objectively defined and standardized, and AI-enabled personalization of multisensory experiences must be supported by rigorous ethical safeguards and regulation, especially in vulnerable populations.</p>
<p>By integrating insights from cognitive psychology and AI, the model proposed in this article provides a valuable framework for understanding and designing personalized, adaptive, and evidence-based sensory environments. It offers promising directions for both fundamental research and therapeutic innovation, while also urging caution. Further empirical studies and theoretical propositions based on this model should also consider arousal as a multidimensional perspective, which is missing in this first version. To conclude, AI-personalization of our sensory perceptions remains entangled with ethical, cognitive, and subjective considerations that must be addressed within a truly interdisciplinary framework.</p>
</sec>
</body>
<back>
<sec sec-type="author-contributions" id="sec13">
<title>Author contributions</title>
<p>RB: Conceptualization, Visualization, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing. XC: Conceptualization, Funding acquisition, Project administration, Supervision, Visualization, Writing &#x2013; review &#x0026; editing.</p>
</sec>

<sec sec-type="COI-statement" id="sec15">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="ai-statement" id="sec16">
<title>Generative AI statement</title>
<p>The author(s) declare that no Gen AI was used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p>
</sec>
<sec sec-type="disclaimer" id="sec17">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="ref1"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Aalbers</surname> <given-names>S.</given-names></name> <name><surname>Fusar-Poli</surname> <given-names>L.</given-names></name> <name><surname>Freeman</surname> <given-names>R. E.</given-names></name> <name><surname>Spreen</surname> <given-names>M.</given-names></name> <name><surname>Ket</surname> <given-names>J. C.</given-names></name> <name><surname>Vink</surname> <given-names>A. C.</given-names></name> <etal/></person-group>. (<year>2017</year>). <article-title>Music therapy for depression</article-title>. <source>Cochrane Database Syst. Rev.</source> <volume>11</volume>:<fpage>CD004517</fpage>. doi: <pub-id pub-id-type="doi">10.1002/14651858.CD004517.pub3</pub-id>, PMID: <pub-id pub-id-type="pmid">29144545</pub-id></mixed-citation></ref>
<ref id="ref2"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ansaldo</surname> <given-names>J.</given-names></name> <name><surname>Palazzolo</surname> <given-names>J.</given-names></name> <name><surname>Corveleyn</surname> <given-names>X.</given-names></name></person-group> (<year>2018</year>). <article-title>The effects of the multisensory approach in the outpatient care of a nursing home resident</article-title>. <source>NPG Neurol. Psychiatrie Geriatrie</source> <volume>18</volume>, <fpage>256</fpage>&#x2013;<lpage>263</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.npg.2018.04.002</pub-id></mixed-citation></ref>
<ref id="ref3"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Baccarani</surname> <given-names>A.</given-names></name> <name><surname>Donnadieu</surname> <given-names>S.</given-names></name> <name><surname>Pellissier</surname> <given-names>S.</given-names></name> <name><surname>Brochard</surname> <given-names>R.</given-names></name></person-group> (<year>2023</year>). <article-title>Relaxing effects of music and odors on physiological recovery after cognitive stress and unexpected absence of multisensory benefit</article-title>. <source>Psychophysiology</source> <volume>60</volume>:<fpage>e14251</fpage>. doi: <pub-id pub-id-type="doi">10.1111/psyp.14251</pub-id>, PMID: <pub-id pub-id-type="pmid">36700294</pub-id></mixed-citation></ref>
<ref id="ref4"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Baddeley</surname> <given-names>A.</given-names></name></person-group> (<year>1996</year>). <article-title>The fractionation of working memory</article-title>. <source>Proc. Natl. Acad. Sci.</source> <volume>93</volume>, <fpage>13468</fpage>&#x2013;<lpage>13472</lpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.93.24.13468</pub-id>, PMID: <pub-id pub-id-type="pmid">8942958</pub-id></mixed-citation></ref>
<ref id="ref5"><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Baines</surname> <given-names>L.</given-names></name></person-group> (<year>2008</year>). <source>A teacher&#x2019;s guide to multisensory learning: Improving literacy by engaging the senses (Arlington, Virginia: ASCD)</source>.</mixed-citation></ref>
<ref id="ref6"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Baird</surname> <given-names>B.</given-names></name> <name><surname>Smallwood</surname> <given-names>J.</given-names></name> <name><surname>Lutz</surname> <given-names>A.</given-names></name> <name><surname>Schooler</surname> <given-names>J. W.</given-names></name></person-group> (<year>2014</year>). <article-title>The decoupled mind: mind-wandering disrupts cortical phase-locking to perceptual events</article-title>. <source>J. Cogn. Neurosci.</source> <volume>26</volume>, <fpage>2596</fpage>&#x2013;<lpage>2607</lpage>. doi: <pub-id pub-id-type="doi">10.1162/jocn_a_00656</pub-id>, PMID: <pub-id pub-id-type="pmid">24742189</pub-id></mixed-citation></ref>
<ref id="ref7"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Baron</surname> <given-names>R. S.</given-names></name></person-group> (<year>1986</year>). &#x201C;<article-title>Distraction-Conflict Theory: Progress and Problems</article-title>&#x201D; in <source>Advances in Experimental Social Psychology. Ed. B. Leonard</source>, Academic Press. <volume>19</volume>, <fpage>1</fpage>&#x2013;<lpage>40</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S0065-2601(08)60211-7</pub-id></mixed-citation></ref>
<ref id="ref8"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Bartoletti</surname> <given-names>R. L.</given-names></name> <name><surname>Denis-No&#x00EB;l</surname> <given-names>A.</given-names></name> <name><surname>Boulvert</surname> <given-names>S.</given-names></name> <name><surname>Lopez</surname> <given-names>M.</given-names></name> <name><surname>Faure</surname> <given-names>S.</given-names></name> <name><surname>Corveleyn</surname> <given-names>X.</given-names></name></person-group> (<year>2023</year>). <article-title>Visuo-tactile congruence leads to stronger illusion than Visuo-proprioceptive congruence: a quantitative and qualitative approach to explore the rubber hand illusion</article-title>. <source>Multisens. Res.</source> <volume>36</volume>, <fpage>477</fpage>&#x2013;<lpage>525</lpage>. doi: <pub-id pub-id-type="doi">10.1163/22134808-bja10101</pub-id>, PMID: <pub-id pub-id-type="pmid">37582516</pub-id></mixed-citation></ref>
<ref id="ref9"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Bell</surname> <given-names>R.</given-names></name> <name><surname>Komar</surname> <given-names>G. F.</given-names></name> <name><surname>Mieth</surname> <given-names>L.</given-names></name> <name><surname>Buchner</surname> <given-names>A.</given-names></name></person-group> (<year>2023a</year>). <article-title>Evidence of a metacognitive illusion in judgments about the effects of music on cognitive performance</article-title>. <source>Sci. Rep.</source> <volume>13</volume>:<fpage>18750</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-023-46169-x</pub-id>, PMID: <pub-id pub-id-type="pmid">37907541</pub-id></mixed-citation></ref>
<ref id="ref10"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Bell</surname> <given-names>R.</given-names></name> <name><surname>Mieth</surname> <given-names>L.</given-names></name> <name><surname>R&#x00F6;er</surname> <given-names>J. P.</given-names></name> <name><surname>Buchner</surname> <given-names>A.</given-names></name></person-group> (<year>2023b</year>). <article-title>The reverse Mozart effect: music disrupts verbal working memory irrespective of whether you like it or not</article-title>. <source>J. Cogn. Psychol.</source> <volume>1&#x2013;20</volume>:<fpage>6919</fpage>. doi: <pub-id pub-id-type="doi">10.1080/20445911.2023.2216919</pub-id></mixed-citation></ref>
<ref id="ref11"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Bensafi</surname> <given-names>M.</given-names></name> <name><surname>Rouby</surname> <given-names>C.</given-names></name> <name><surname>Farget</surname> <given-names>V.</given-names></name> <name><surname>Bertrand</surname> <given-names>B.</given-names></name> <name><surname>Vigouroux</surname> <given-names>M.</given-names></name> <name><surname>Holley</surname> <given-names>A.</given-names></name></person-group> (<year>2002</year>). <article-title>Autonomic nervous system responses to Odours: the role of pleasantness and arousal</article-title>. <source>Chem. Senses</source> <volume>27</volume>, <fpage>703</fpage>&#x2013;<lpage>709</lpage>. doi: <pub-id pub-id-type="doi">10.1093/chemse/27.8.703</pub-id>, PMID: <pub-id pub-id-type="pmid">12379594</pub-id></mixed-citation></ref>
<ref id="ref12"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Boesveldt</surname> <given-names>S.</given-names></name> <name><surname>Frasnelli</surname> <given-names>J.</given-names></name> <name><surname>Gordon</surname> <given-names>A. R.</given-names></name> <name><surname>Lundstr&#x00F6;m</surname> <given-names>J. N.</given-names></name></person-group> (<year>2010</year>). <article-title>The fish is bad: negative food odors elicit faster and more accurate reactions than other odors</article-title>. <source>Biol. Psychol.</source> <volume>84</volume>, <fpage>313</fpage>&#x2013;<lpage>317</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.biopsycho.2010.03.006</pub-id>, PMID: <pub-id pub-id-type="pmid">20227457</pub-id></mixed-citation></ref>
<ref id="ref13"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Boswell</surname> <given-names>R.</given-names></name></person-group> (<year>2008</year>). <article-title>Scents of identity: fragrance as heritage in Zanzibar</article-title>. <source>J. Contemp. Afr. Stud.</source> <volume>26</volume>, <fpage>295</fpage>&#x2013;<lpage>311</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02589000802332507</pub-id></mixed-citation></ref>
<ref id="ref14"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Brauchli</surname> <given-names>P.</given-names></name> <name><surname>R&#x00FC;egg</surname> <given-names>P. B.</given-names></name> <name><surname>Etzweiler</surname> <given-names>F.</given-names></name> <name><surname>Zeier</surname> <given-names>H.</given-names></name></person-group> (<year>1995</year>). <article-title>Electrocortical and autonomic alteration by administration of a pleasant and an unpleasant odor</article-title>. <source>Chem. Senses</source> <volume>20</volume>, <fpage>505</fpage>&#x2013;<lpage>515</lpage>. doi: <pub-id pub-id-type="doi">10.1093/chemse/20.5.505</pub-id>, PMID: <pub-id pub-id-type="pmid">8564425</pub-id></mixed-citation></ref>
<ref id="ref15"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Buckner</surname> <given-names>R. L.</given-names></name> <name><surname>Krienen</surname> <given-names>F. M.</given-names></name></person-group> (<year>2013</year>). <article-title>The evolution of distributed association networks in the human brain</article-title>. <source>Trends Cogn. Sci.</source> <volume>17</volume>, <fpage>648</fpage>&#x2013;<lpage>665</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2013.09.017</pub-id>, PMID: <pub-id pub-id-type="pmid">24210963</pub-id></mixed-citation></ref>
<ref id="ref16"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Carreti&#x00E9;</surname> <given-names>L.</given-names></name></person-group> (<year>2014</year>). <article-title>Exogenous (automatic) attention to emotional stimuli: a review</article-title>. <source>Cogn. Affect. Behav. Neurosci.</source> <volume>14</volume>, <fpage>1228</fpage>&#x2013;<lpage>1258</lpage>. doi: <pub-id pub-id-type="doi">10.3758/s13415-014-0270-2</pub-id>, PMID: <pub-id pub-id-type="pmid">24683062</pub-id></mixed-citation></ref>
<ref id="ref17"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Chancel</surname> <given-names>M.</given-names></name> <name><surname>Ehrsson</surname> <given-names>H. H.</given-names></name></person-group> (<year>2023</year>). <article-title>Proprioceptive uncertainty promotes the rubber hand illusion</article-title>. <source>Cortex</source> <volume>165</volume>, <fpage>70</fpage>&#x2013;<lpage>85</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cortex.2023.04.005</pub-id>, PMID: <pub-id pub-id-type="pmid">37269634</pub-id></mixed-citation></ref>
<ref id="ref18"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Charles</surname> <given-names>S. T.</given-names></name></person-group> (<year>2010</year>). <article-title>Strength and vulnerability integration: a model of emotional well-being across adulthood</article-title>. <source>Psychol. Bull.</source> <volume>136</volume>, <fpage>1068</fpage>&#x2013;<lpage>1091</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0021232</pub-id>, PMID: <pub-id pub-id-type="pmid">21038939</pub-id></mixed-citation></ref>
<ref id="ref19"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Chu</surname> <given-names>S.</given-names></name></person-group> (<year>2000</year>). <article-title>Odour-evoked autobiographical memories: psychological investigations of proustian phenomena</article-title>. <source>Chem. Senses</source> <volume>25</volume>, <fpage>111</fpage>&#x2013;<lpage>116</lpage>. doi: <pub-id pub-id-type="doi">10.1093/chemse/25.1.111</pub-id>, PMID: <pub-id pub-id-type="pmid">10668001</pub-id></mixed-citation></ref>
<ref id="ref20"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Cieri</surname> <given-names>F.</given-names></name> <name><surname>Cross</surname> <given-names>C. L.</given-names></name> <name><surname>Di Francesco</surname> <given-names>G.</given-names></name> <name><surname>Caldwell</surname> <given-names>J. Z. K.</given-names></name></person-group> (<year>2025</year>). <article-title>Dynamic neurocognitive adaptation: a follow-up Exposome investigation in aging</article-title>. <source>Alzheimers Dement. Transl. Res. Clin. Intervent.</source> <volume>11</volume>:<fpage>103</fpage>. doi: <pub-id pub-id-type="doi">10.1002/trc2.70103</pub-id></mixed-citation></ref>
<ref id="ref21"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Cieri</surname> <given-names>F.</given-names></name> <name><surname>Di Francesco</surname> <given-names>G.</given-names></name> <name><surname>Cross</surname> <given-names>C. L.</given-names></name> <name><surname>Bender</surname> <given-names>A.</given-names></name> <name><surname>Caldwell</surname> <given-names>J. Z. K.</given-names></name></person-group> (<year>2025</year>). <article-title>Dynamic neurocognitive adaptation in aging: development and validation of a new scale</article-title>. <source>Alzheimers Dement. (N Y)</source> <volume>11</volume>:<fpage>49</fpage>. doi: <pub-id pub-id-type="doi">10.1002/trc2.70049</pub-id>, PMID: <pub-id pub-id-type="pmid">39839075</pub-id></mixed-citation></ref>
<ref id="ref22"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Clare</surname> <given-names>A.</given-names></name> <name><surname>Camic</surname> <given-names>P. M.</given-names></name> <name><surname>Crutch</surname> <given-names>S. J.</given-names></name> <name><surname>West</surname> <given-names>J.</given-names></name> <name><surname>Harding</surname> <given-names>E.</given-names></name> <name><surname>Brotherhood</surname> <given-names>E.</given-names></name></person-group> (<year>2020</year>). <article-title>Using music to develop a multisensory communicative environment for people with late-stage dementia</article-title>. <source>The Gerontologist</source> <volume>60</volume>, <fpage>1115</fpage>&#x2013;<lpage>1125</lpage>. doi: <pub-id pub-id-type="doi">10.1093/geront/gnz169</pub-id>, PMID: <pub-id pub-id-type="pmid">31812993</pub-id></mixed-citation></ref>
<ref id="ref23"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Cloutier</surname> <given-names>A.</given-names></name> <name><surname>Fernandez</surname> <given-names>N. B.</given-names></name> <name><surname>Houde-Archambault</surname> <given-names>C.</given-names></name> <name><surname>Gosselin</surname> <given-names>N.</given-names></name></person-group> (<year>2020</year>). <article-title>Effect of background music on attentional control in older and young adults</article-title>. <source>Front. Psychol.</source> <volume>11</volume>:<fpage>557225</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2020.557225</pub-id>, PMID: <pub-id pub-id-type="pmid">33192813</pub-id></mixed-citation></ref>
<ref id="ref24"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Corveleyn</surname> <given-names>X.</given-names></name> <name><surname>Lopez-Moliner</surname> <given-names>J.</given-names></name> <name><surname>Coello</surname> <given-names>Y.</given-names></name></person-group> (<year>2012</year>). <article-title>Motor action reduces temporal asynchrony between perceived visual changes</article-title>. <source>J. Vis.</source> <volume>12</volume>:<fpage>20</fpage>. doi: <pub-id pub-id-type="doi">10.1167/12.11.20</pub-id>, PMID: <pub-id pub-id-type="pmid">23092948</pub-id></mixed-citation></ref>
<ref id="ref25"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Crisinel</surname> <given-names>A. S.</given-names></name> <name><surname>Jacquier</surname> <given-names>C.</given-names></name> <name><surname>Deroy</surname> <given-names>O.</given-names></name> <name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2013</year>). <article-title>Composing with cross-modal correspondences: music and odors in concert</article-title>. <source>Chemosens. Percept.</source> <volume>6</volume>, <fpage>45</fpage>&#x2013;<lpage>52</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s12078-012-9138-4</pub-id></mixed-citation></ref>
<ref id="ref26"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Darrow</surname> <given-names>A.-A.</given-names></name> <name><surname>Johnson</surname> <given-names>C.</given-names></name> <name><surname>Agnew</surname> <given-names>S.</given-names></name> <name><surname>Fuller</surname> <given-names>E. R.</given-names></name> <name><surname>Uchisaka</surname> <given-names>M.</given-names></name></person-group> (<year>2006</year>). <article-title>Effect of preferred music as a distraction on music majors&#x2019; and nonmusic majors&#x2019; selective attention</article-title>. <source>Bull. Counc. Res. Music. Educ.</source> <volume>170</volume>, <fpage>21</fpage>&#x2013;<lpage>31</lpage>. Available online at: <ext-link xlink:href="http://www.jstor.org/stable/40319346" ext-link-type="uri">http://www.jstor.org/stable/40319346</ext-link></mixed-citation></ref>
<ref id="ref27"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>De Oliveira</surname> <given-names>T. C. G.</given-names></name> <name><surname>Soares</surname> <given-names>F. C.</given-names></name> <name><surname>De Macedo</surname> <given-names>L. D. E. D.</given-names></name> <name><surname>Diniz</surname> <given-names>D. L. W. P.</given-names></name> <name><surname>Bento-Torres</surname> <given-names>N. V. O.</given-names></name> <name><surname>Pican&#x00E7;o-Diniz</surname> <given-names>C. W.</given-names></name></person-group> (<year>2014</year>). <article-title>Beneficial effects of multisensory and cognitive stimulation on age-related cognitive decline in long-term-care institutions</article-title>. <source>Clin. Interv. Aging</source> <volume>9</volume>, <fpage>309</fpage>&#x2013;<lpage>321</lpage>. doi: <pub-id pub-id-type="doi">10.2147/CIA.S54383</pub-id>, PMID: <pub-id pub-id-type="pmid">24600211</pub-id></mixed-citation></ref>
<ref id="ref28"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Dibben</surname> <given-names>N.</given-names></name> <name><surname>Williamson</surname> <given-names>V. J.</given-names></name></person-group> (<year>2007</year>). <article-title>An exploratory survey of in-vehicle music listening</article-title>. <source>Psychol. Music</source> <volume>35</volume>, <fpage>571</fpage>&#x2013;<lpage>589</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0305735607079725</pub-id></mixed-citation></ref>
<ref id="ref29"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Douc&#x00E9;</surname> <given-names>L.</given-names></name> <name><surname>Janssens</surname> <given-names>W.</given-names></name></person-group> (<year>2013</year>). <article-title>The presence of a pleasant ambient scent in a fashion store</article-title>. <source>Environ. Behav.</source> <volume>45</volume>, <fpage>215</fpage>&#x2013;<lpage>238</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0013916511410421</pub-id></mixed-citation></ref>
<ref id="ref30"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Egloff</surname> <given-names>B.</given-names></name> <name><surname>Schmukle</surname> <given-names>S. C.</given-names></name> <name><surname>Burns</surname> <given-names>L. R.</given-names></name> <name><surname>Schwerdtfeger</surname> <given-names>A.</given-names></name></person-group> (<year>2006</year>). <article-title>Spontaneous emotion regulation during evaluated speaking tasks: associations with negative affect, anxiety expression, memory, and physiological responding</article-title>. <source>Emotion</source> <volume>6</volume>, <fpage>356</fpage>&#x2013;<lpage>366</lpage>. doi: <pub-id pub-id-type="doi">10.1037/1528-3542.6.3.356</pub-id>, PMID: <pub-id pub-id-type="pmid">16938078</pub-id></mixed-citation></ref>
<ref id="ref31"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Emsenhuber</surname> <given-names>B.</given-names></name></person-group> (<year>2011</year>). &#x201C;<article-title>Scent marketing: making olfactory advertising pervasive</article-title>&#x201D; in <source>Pervasive advertising. Human-computer interaction series</source>. eds. <person-group person-group-type="editor"><name><surname>M&#x00FC;ller</surname> <given-names>J.</given-names></name> <name><surname>Alt</surname> <given-names>F.</given-names></name> <name><surname>Michelis</surname> <given-names>D.</given-names></name></person-group> (<publisher-name>London: Springer</publisher-name>), <fpage>343</fpage>&#x2013;<lpage>360</lpage>.</mixed-citation></ref>
<ref id="ref32"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Fulkerson</surname> <given-names>M.</given-names></name></person-group> (<year>2020</year>). &#x201C;<article-title>Philosophical insights</article-title>&#x201D; in <source>Multisensory perception: From laboratory to clinic</source>. eds. <person-group person-group-type="editor"><name><surname>Sathian</surname> <given-names>K.</given-names></name> <name><surname>Ramachandran</surname> <given-names>V. S.</given-names></name></person-group> (<publisher-name>Elsevier</publisher-name>), <fpage>41</fpage>&#x2013;<lpage>55</lpage>. doi: <pub-id pub-id-type="doi">10.1016/B978-0-12-812492-5.00002-4</pub-id></mixed-citation></ref>
<ref id="ref33"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Gao</surname> <given-names>Y.</given-names></name> <name><surname>Liu</surname> <given-names>H.</given-names></name></person-group> (<year>2023</year>). <article-title>Artificial intelligence-enabled personalization in interactive marketing: a customer journey perspective</article-title>. <source>J. Res. Interact. Mark.</source> <volume>17</volume>, <fpage>663</fpage>&#x2013;<lpage>680</lpage>. doi: <pub-id pub-id-type="doi">10.1108/JRIM-01-2022-0023</pub-id></mixed-citation></ref>
<ref id="ref34"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Goltz</surname> <given-names>F.</given-names></name> <name><surname>Sadakata</surname> <given-names>M.</given-names></name></person-group> (<year>2021</year>). <article-title>Do you listen to music while studying? A portrait of how people use music to optimize their cognitive performance</article-title>. <source>Acta Psychol.</source> <volume>220</volume>:<fpage>103417</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.actpsy.2021.103417</pub-id>, PMID: <pub-id pub-id-type="pmid">34555564</pub-id></mixed-citation></ref>
<ref id="ref35"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Gonzalez</surname> <given-names>M. F.</given-names></name> <name><surname>Aiello</surname> <given-names>J. R.</given-names></name></person-group> (<year>2019</year>). <article-title>More than meets the ear: investigating how music affects cognitive task performance</article-title>. <source>J. Exp. Psychol. Appl.</source> <volume>25</volume>, <fpage>431</fpage>&#x2013;<lpage>444</lpage>. doi: <pub-id pub-id-type="doi">10.1037/xap0000202</pub-id>, PMID: <pub-id pub-id-type="pmid">30688499</pub-id></mixed-citation></ref>
<ref id="ref36"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Grifoni</surname> <given-names>J.</given-names></name> <name><surname>Pagani</surname> <given-names>M.</given-names></name> <name><surname>Persichilli</surname> <given-names>G.</given-names></name> <name><surname>Bertoli</surname> <given-names>M.</given-names></name> <name><surname>Bevacqua</surname> <given-names>M. G.</given-names></name> <name><surname>L&#x2019;Abbate</surname> <given-names>T.</given-names></name> <etal/></person-group>. (<year>2023</year>). <article-title>Auditory personalization of EMDR treatment to relieve trauma effects: a feasibility study [EMDR+]</article-title>. <source>Brain Sci.</source> <volume>13</volume>:<fpage>1050</fpage>. doi: <pub-id pub-id-type="doi">10.3390/brainsci13071050</pub-id>, PMID: <pub-id pub-id-type="pmid">37508982</pub-id></mixed-citation></ref>
<ref id="ref37"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Gross</surname> <given-names>J. J.</given-names></name></person-group> (<year>2002</year>). <article-title>Emotion regulation: affective, cognitive, and social consequences</article-title>. <source>Psychophysiology</source> <volume>39</volume>, <fpage>281</fpage>&#x2013;<lpage>291</lpage>. doi: <pub-id pub-id-type="doi">10.1017/S0048577201393198</pub-id>, PMID: <pub-id pub-id-type="pmid">12212647</pub-id></mixed-citation></ref>
<ref id="ref38"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Guti&#x00E9;rrez</surname> <given-names>E. O. F.</given-names></name> <name><surname>Camarena</surname> <given-names>V. A. T.</given-names></name></person-group> (<year>2015</year>). <article-title>Music therapy in generalized anxiety disorder</article-title>. <source>Arts Psychother.</source> <volume>44</volume>, <fpage>19</fpage>&#x2013;<lpage>24</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.aip.2015.02.003</pub-id></mixed-citation></ref>
<ref id="ref39"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Hackl&#x00E4;nder</surname> <given-names>R. P. M.</given-names></name> <name><surname>Janssen</surname> <given-names>S. M. J.</given-names></name> <name><surname>Bermeitinger</surname> <given-names>C.</given-names></name></person-group> (<year>2019</year>). <article-title>An in-depth review of the methods, findings, and theories associated with odor-evoked autobiographical memory</article-title>. <source>Psychon. Bull. Rev.</source> <volume>26</volume>, <fpage>401</fpage>&#x2013;<lpage>429</lpage>. doi: <pub-id pub-id-type="doi">10.3758/s13423-018-1545-3</pub-id>, PMID: <pub-id pub-id-type="pmid">30406397</pub-id></mixed-citation></ref>
<ref id="ref40"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Haggard</surname> <given-names>P.</given-names></name> <name><surname>Cole</surname> <given-names>J.</given-names></name></person-group> (<year>2007</year>). <article-title>Intention, attention and the temporal experience of action</article-title>. <source>Conscious. Cogn.</source> <volume>16</volume>, <fpage>211</fpage>&#x2013;<lpage>220</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.concog.2006.07.002</pub-id>, PMID: <pub-id pub-id-type="pmid">16934490</pub-id></mixed-citation></ref>
<ref id="ref41"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Hao</surname> <given-names>Z.</given-names></name> <name><surname>Li</surname> <given-names>H.</given-names></name> <name><surname>Guo</surname> <given-names>J.</given-names></name> <name><surname>Xu</surname> <given-names>Y.</given-names></name></person-group> (<year>2025</year>). <article-title>Advances in artificial intelligence for olfaction and gustation: a comprehensive review</article-title>. <source>Artif. Intell. Rev.</source> <volume>58</volume>:<fpage>306</fpage>. doi: <pub-id pub-id-type="doi">10.1007/s10462-025-11309-4</pub-id></mixed-citation></ref>
<ref id="ref42"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Hermann</surname> <given-names>E.</given-names></name></person-group> (<year>2022</year>). <article-title>Artificial intelligence and mass personalization of communication content&#x2014;an ethical and literacy perspective</article-title>. <source>New Media Soc.</source> <volume>24</volume>, <fpage>1258</fpage>&#x2013;<lpage>1277</lpage>. doi: <pub-id pub-id-type="doi">10.1177/14614448211022702</pub-id></mixed-citation></ref>
<ref id="ref43"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Herz</surname> <given-names>R. S.</given-names></name></person-group> (<year>2002</year>). &#x201C;<article-title>Influences of odors on mood and affective cognition</article-title>&#x201D; in <source>Olfaction, taste, and cognition</source>. eds. <person-group person-group-type="editor"><name><surname>Rouby</surname> <given-names>C.</given-names></name> <name><surname>Schaal</surname> <given-names>B.</given-names></name> <name><surname>Dubois</surname> <given-names>D.</given-names></name> <name><surname>Gervais</surname> <given-names>R.</given-names></name> <name><surname>Holley</surname> <given-names>A.</given-names></name></person-group> (<publisher-name>Cambridge University Press</publisher-name>), <fpage>160</fpage>&#x2013;<lpage>177</lpage>. doi: <pub-id pub-id-type="doi">10.1017/CBO9780511546389.016</pub-id></mixed-citation></ref>
<ref id="ref44"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Herz</surname> <given-names>R.</given-names></name></person-group> (<year>2016</year>). <article-title>The role of odor-evoked memory in psychological and physiological health</article-title>. <source>Brain Sci.</source> <volume>6</volume>:<fpage>22</fpage>. doi: <pub-id pub-id-type="doi">10.3390/brainsci6030022</pub-id>, PMID: <pub-id pub-id-type="pmid">27447673</pub-id></mixed-citation></ref>
<ref id="ref45"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Herz</surname> <given-names>R. S.</given-names></name> <name><surname>Bajec</surname> <given-names>M. R.</given-names></name></person-group> (<year>2022</year>). <article-title>Your money or your sense of smell? A comparative analysis of the sensory and psychological value of olfaction</article-title>. <source>Brain Sci.</source> <volume>12</volume>:<fpage>299</fpage>. doi: <pub-id pub-id-type="doi">10.3390/brainsci12030299</pub-id>, PMID: <pub-id pub-id-type="pmid">35326256</pub-id></mixed-citation></ref>
<ref id="ref46"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ho</surname> <given-names>C.</given-names></name> <name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2005</year>). <article-title>Olfactory facilitation of dual-task performance</article-title>. <source>Neurosci. Lett.</source> <volume>389</volume>, <fpage>35</fpage>&#x2013;<lpage>40</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neulet.2005.07.003</pub-id>, PMID: <pub-id pub-id-type="pmid">16054298</pub-id></mixed-citation></ref>
<ref id="ref47"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Huang</surname> <given-names>M.</given-names></name> <name><surname>Chen</surname> <given-names>C.</given-names></name></person-group> (<year>2023</year>). <article-title>The effects of olfactory cues as interface notifications on a mobile phone</article-title>. <source>J. Multimodal User Interfaces</source> <volume>17</volume>, <fpage>21</fpage>&#x2013;<lpage>32</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s12193-022-00399-x</pub-id></mixed-citation></ref>
<ref id="ref48"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Huang</surname> <given-names>R. H.</given-names></name> <name><surname>Shih</surname> <given-names>Y. N.</given-names></name></person-group> (<year>2011</year>). <article-title>Effects of background music on concentration of workers</article-title>. <source>Work</source> <volume>38</volume>, <fpage>383</fpage>&#x2013;<lpage>387</lpage>. doi: <pub-id pub-id-type="doi">10.3233/WOR-2011-1141</pub-id>, PMID: <pub-id pub-id-type="pmid">21508527</pub-id></mixed-citation></ref>
<ref id="ref49"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Husain</surname> <given-names>G.</given-names></name> <name><surname>Thompson</surname> <given-names>W. F.</given-names></name> <name><surname>Schellenberg</surname> <given-names>E. G.</given-names></name></person-group> (<year>2002</year>). <article-title>Effects of musical tempo and mode on arousal, mood, and spatial abilities</article-title>. <source>Music. Percept.</source> <volume>20</volume>, <fpage>151</fpage>&#x2013;<lpage>171</lpage>. doi: <pub-id pub-id-type="doi">10.1525/mp.2002.20.2.151</pub-id></mixed-citation></ref>
<ref id="ref50"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Jellinek</surname> <given-names>J. S.</given-names></name></person-group> (<year>2004</year>). <article-title>Proust remembered: has Proust&#x2019;s account of odor-cued autobiographical memory recall really been investigated?</article-title> <source>Chem. Senses</source> <volume>29</volume>, <fpage>455</fpage>&#x2013;<lpage>458</lpage>. doi: <pub-id pub-id-type="doi">10.1093/chemse/bjh043</pub-id>, PMID: <pub-id pub-id-type="pmid">15201212</pub-id></mixed-citation></ref>
<ref id="ref51"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Johansson</surname> <given-names>R.</given-names></name> <name><surname>Holmqvist</surname> <given-names>K.</given-names></name> <name><surname>Mossberg</surname> <given-names>F.</given-names></name> <name><surname>Lindgren</surname> <given-names>M.</given-names></name></person-group> (<year>2012</year>). <article-title>Eye movements and reading comprehension while listening to preferred and non-preferred study music</article-title>. <source>Psychol. Music</source> <volume>40</volume>, <fpage>339</fpage>&#x2013;<lpage>356</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0305735610387777</pub-id></mixed-citation></ref>
<ref id="ref52"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Kahneman</surname> <given-names>D.</given-names></name></person-group> (<year>1973</year>). <source>Attention and effort</source>. <publisher-name>Englewood Cliffs, New Jersey: Prentice-Hall</publisher-name>. <volume>1063</volume>.</mixed-citation></ref>
<ref id="ref53"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kiss</surname> <given-names>L.</given-names></name> <name><surname>Linnell</surname> <given-names>K. J.</given-names></name></person-group> (<year>2021</year>). <article-title>The effect of preferred background music on task-focus in sustained attention</article-title>. <source>Psychol. Res.</source> <volume>85</volume>, <fpage>2313</fpage>&#x2013;<lpage>2325</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00426-020-01400-6</pub-id>, PMID: <pub-id pub-id-type="pmid">32748062</pub-id></mixed-citation></ref>
<ref id="ref54"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kiss</surname> <given-names>L.</given-names></name> <name><surname>Szikora</surname> <given-names>B.</given-names></name> <name><surname>Linnell</surname> <given-names>K. J.</given-names></name></person-group> (<year>2024</year>). <article-title>Music in the eye of the beholder: a pupillometric study on preferred background music, attentional state, and arousal</article-title>. <source>Psychol. Res.</source> <volume>88</volume>, <fpage>1616</fpage>&#x2013;<lpage>1628</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00426-024-01963-8</pub-id>, PMID: <pub-id pub-id-type="pmid">38652303</pub-id></mixed-citation></ref>
<ref id="ref55"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kontaris</surname> <given-names>I.</given-names></name> <name><surname>East</surname> <given-names>B. S.</given-names></name> <name><surname>Wilson</surname> <given-names>D. A.</given-names></name></person-group> (<year>2020</year>). <article-title>Behavioral and neurobiological convergence of odor, mood and emotion: a review</article-title>. <source>Front. Behav. Neurosci.</source> <volume>14</volume>:<fpage>35</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnbeh.2020.00035</pub-id>, PMID: <pub-id pub-id-type="pmid">32210776</pub-id></mixed-citation></ref>
<ref id="ref56"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kotsopoulou</surname> <given-names>A.</given-names></name> <name><surname>Hallam</surname> <given-names>S.</given-names></name></person-group> (<year>2010</year>). <article-title>The perceived impact of playing music while studying: age and cultural differences</article-title>. <source>Educ. Stud.</source> <volume>36</volume>, <fpage>431</fpage>&#x2013;<lpage>440</lpage>. doi: <pub-id pub-id-type="doi">10.1080/03055690903424774</pub-id></mixed-citation></ref>
<ref id="ref57"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Larsson</surname> <given-names>M.</given-names></name> <name><surname>&#x00D6;berg</surname> <given-names>C.</given-names></name> <name><surname>B&#x00E4;ckman</surname> <given-names>L.</given-names></name></person-group> (<year>2006</year>). <article-title>Recollective experience in odor recognition: influences of adult age and familiarity</article-title>. <source>Psychol. Res.</source> <volume>70</volume>, <fpage>68</fpage>&#x2013;<lpage>75</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00426-004-0190-9</pub-id>, PMID: <pub-id pub-id-type="pmid">15480757</pub-id></mixed-citation></ref>
<ref id="ref58"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Lavie</surname> <given-names>N.</given-names></name></person-group> (<year>2005</year>). <article-title>Distracted and confused? Selective attention under load</article-title>. <source>Trends Cogn. Sci.</source> <volume>9</volume>, <fpage>75</fpage>&#x2013;<lpage>82</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2004.12.004</pub-id>, PMID: <pub-id pub-id-type="pmid">15668100</pub-id></mixed-citation></ref>
<ref id="ref59"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Lavie</surname> <given-names>N.</given-names></name></person-group> (<year>2010</year>). <article-title>Attention, distraction, and cognitive control under load</article-title>. <source>Curr. Dir. Psychol. Sci.</source> <volume>19</volume>, <fpage>143</fpage>&#x2013;<lpage>148</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0963721410370295</pub-id></mixed-citation></ref>
<ref id="ref60"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Lee</surname> <given-names>J. P.</given-names></name> <name><surname>Jang</surname> <given-names>H.</given-names></name> <name><surname>Jang</surname> <given-names>Y.</given-names></name> <name><surname>Song</surname> <given-names>H.</given-names></name> <name><surname>Lee</surname> <given-names>S.</given-names></name> <name><surname>Lee</surname> <given-names>P. S.</given-names></name> <etal/></person-group>. (<year>2024</year>). <article-title>Encoding of multi-modal emotional information via personalized skin-integrated wireless facial Interface</article-title>. <source>Nat. Commun.</source> <volume>15</volume>:<fpage>530</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41467-023-44673-2</pub-id>, PMID: <pub-id pub-id-type="pmid">38225246</pub-id></mixed-citation></ref>
<ref id="ref61"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Lombion</surname> <given-names>S.</given-names></name> <name><surname>Comte</surname> <given-names>A.</given-names></name> <name><surname>Tatu</surname> <given-names>L.</given-names></name> <name><surname>Brand</surname> <given-names>G.</given-names></name> <name><surname>Moulin</surname> <given-names>T.</given-names></name> <name><surname>Millot</surname> <given-names>J.</given-names></name></person-group> (<year>2009</year>). <article-title>Patterns of cerebral activation during olfactory and trigeminal stimulations</article-title>. <source>Hum. Brain Mapp.</source> <volume>30</volume>, <fpage>821</fpage>&#x2013;<lpage>828</lpage>. doi: <pub-id pub-id-type="doi">10.1002/hbm.20548</pub-id>, PMID: <pub-id pub-id-type="pmid">18330871</pub-id></mixed-citation></ref>
<ref id="ref62"><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Maggioni</surname> <given-names>E.</given-names></name> <name><surname>Cobden</surname> <given-names>R.</given-names></name> <name><surname>Dmitrenko</surname> <given-names>D.</given-names></name> <name><surname>Obrist</surname> <given-names>M.</given-names></name></person-group> (<year>2018</year>). &#x201C;Smell-O-Message.&#x201D; in <italic>Proceedings of the 20th ACM international conference on multimodal interaction</italic>, 45&#x2013;54.</mixed-citation></ref>
<ref id="ref63"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Maggioni</surname> <given-names>E.</given-names></name> <name><surname>Cobden</surname> <given-names>R.</given-names></name> <name><surname>Obrist</surname> <given-names>M.</given-names></name></person-group> (<year>2019</year>). <article-title>Owidgets: a toolkit to enable smell-based experience design</article-title>. <source>Int. J. Hum.-Comput. Stud.</source> <volume>130</volume>, <fpage>248</fpage>&#x2013;<lpage>260</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.ijhcs.2019.06.014</pub-id></mixed-citation></ref>
<ref id="ref64"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Majid</surname> <given-names>A.</given-names></name></person-group> (<year>2015</year>). <article-title>Cultural factors shape olfactory language</article-title>. <source>Trends Cogn. Sci.</source> <volume>19</volume>, <fpage>629</fpage>&#x2013;<lpage>630</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2015.06.009</pub-id>, PMID: <pub-id pub-id-type="pmid">26440119</pub-id></mixed-citation></ref>
<ref id="ref65"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Maseda</surname> <given-names>A.</given-names></name> <name><surname>Cibeira</surname> <given-names>N.</given-names></name> <name><surname>Lorenzo-L&#x00F3;pez</surname> <given-names>L.</given-names></name> <name><surname>Gonz&#x00E1;lez-Abraldes</surname> <given-names>I.</given-names></name> <name><surname>Buj&#x00E1;n</surname> <given-names>A.</given-names></name> <name><surname>de Labra</surname> <given-names>C.</given-names></name> <etal/></person-group>. (<year>2018</year>). <article-title>Multisensory stimulation and individualized music sessions on older adults with severe dementia: effects on mood, behavior, and biomedical parameters</article-title>. <source>J. Alzheimers Dis.</source> <volume>63</volume>, <fpage>1415</fpage>&#x2013;<lpage>1425</lpage>. doi: <pub-id pub-id-type="doi">10.3233/JAD-180109</pub-id>, PMID: <pub-id pub-id-type="pmid">29843244</pub-id></mixed-citation></ref>
<ref id="ref66"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Matsukura</surname> <given-names>H.</given-names></name> <name><surname>Yoneda</surname> <given-names>T.</given-names></name> <name><surname>Ishida</surname> <given-names>H.</given-names></name></person-group> (<year>2013</year>). <article-title>Smelling screen: development and evaluation of an olfactory display system for presenting a virtual odor source</article-title>. <source>IEEE Trans. Vis. Comput. Graph.</source> <volume>19</volume>, <fpage>606</fpage>&#x2013;<lpage>615</lpage>. doi: <pub-id pub-id-type="doi">10.1109/TVCG.2013.40</pub-id>, PMID: <pub-id pub-id-type="pmid">23428445</pub-id></mixed-citation></ref>
<ref id="ref67"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Matz</surname> <given-names>S. C.</given-names></name> <name><surname>Kosinski</surname> <given-names>M.</given-names></name> <name><surname>Nave</surname> <given-names>G.</given-names></name> <name><surname>Stillwell</surname> <given-names>D. J.</given-names></name></person-group> (<year>2017</year>). <article-title>Psychological targeting as an effective approach to digital mass persuasion</article-title>. <source>Proc. Natl. Acad. Sci.</source> <volume>114</volume>, <fpage>12714</fpage>&#x2013;<lpage>12719</lpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.1710966114</pub-id>, PMID: <pub-id pub-id-type="pmid">29133409</pub-id></mixed-citation></ref>
<ref id="ref68"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Matz</surname> <given-names>S. C.</given-names></name> <name><surname>Netzer</surname> <given-names>O.</given-names></name></person-group> (<year>2017</year>). <article-title>Using big data as a window into consumers&#x2019; psychology</article-title>. <source>Curr. Opin. Behav. Sci.</source> <volume>18</volume>, <fpage>7</fpage>&#x2013;<lpage>12</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cobeha.2017.05.009</pub-id></mixed-citation></ref>
<ref id="ref69"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Mazziotti</surname> <given-names>G.</given-names></name> <name><surname>Ranaivoson</surname> <given-names>H.</given-names></name></person-group> (<year>2024</year>). <article-title>Can online music platforms be fair? An interdisciplinary research manifesto</article-title>. <source>IIC - Int. Rev. Intellect. Prop. Compet. Law</source> <volume>55</volume>, <fpage>249</fpage>&#x2013;<lpage>279</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s40319-023-01420-w</pub-id></mixed-citation></ref>
<ref id="ref70"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>McGurk</surname> <given-names>H.</given-names></name> <name><surname>MacDonald</surname> <given-names>J.</given-names></name></person-group> (<year>1976</year>). <article-title>Hearing lips and seeing voices</article-title>. <source>Nature</source> <volume>264</volume>, <fpage>746</fpage>&#x2013;<lpage>748</lpage>. doi: <pub-id pub-id-type="doi">10.1038/264746a0</pub-id>, PMID: <pub-id pub-id-type="pmid">1012311</pub-id></mixed-citation></ref>
<ref id="ref71"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Mohan</surname> <given-names>A.</given-names></name> <name><surname>Thomas</surname> <given-names>E.</given-names></name></person-group> (<year>2020</year>). <article-title>Effect of background music and the cultural preference to music on adolescents&#x2019; task performance</article-title>. <source>Int. J. Adolesc. Youth</source> <volume>25</volume>, <fpage>562</fpage>&#x2013;<lpage>573</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02673843.2019.1689368</pub-id></mixed-citation></ref>
<ref id="ref72"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Moore</surname> <given-names>K. S.</given-names></name></person-group> (<year>2013</year>). <article-title>A systematic review on the neural effects of music on emotion regulation: implications for music therapy practice</article-title>. <source>J. Music. Ther.</source> <volume>50</volume>, <fpage>198</fpage>&#x2013;<lpage>242</lpage>. doi: <pub-id pub-id-type="doi">10.1093/jmt/50.3.198</pub-id>, PMID: <pub-id pub-id-type="pmid">24568004</pub-id></mixed-citation></ref>
<ref id="ref73"><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Mori</surname> <given-names>F.</given-names></name> <name><surname>Naghsh</surname> <given-names>F. A.</given-names></name> <name><surname>Tezuka</surname> <given-names>T.</given-names></name></person-group> (<year>2014</year>). &#x201C;The effect of music on the level of mental concentration and its temporal change.&#x201D; in <italic>Proceedings of the 6th international conference on computer supported education</italic>, 1, pp. 34&#x2013;42.</mixed-citation></ref>
<ref id="ref74"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Moss</surname> <given-names>M.</given-names></name> <name><surname>Hewitt</surname> <given-names>S.</given-names></name> <name><surname>Moss</surname> <given-names>L.</given-names></name> <name><surname>Wesnes</surname> <given-names>K.</given-names></name></person-group> (<year>2008</year>). <article-title>Modulation of cognitive performance and mood by aromas of peppermint and ylang-ylang</article-title>. <source>Int. J. Neurosci.</source> <volume>118</volume>, <fpage>59</fpage>&#x2013;<lpage>77</lpage>. doi: <pub-id pub-id-type="doi">10.1080/00207450601042094</pub-id>, PMID: <pub-id pub-id-type="pmid">18041606</pub-id></mixed-citation></ref>
<ref id="ref75"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Motoki</surname> <given-names>K.</given-names></name> <name><surname>Marks</surname> <given-names>L. E.</given-names></name> <name><surname>Velasco</surname> <given-names>C.</given-names></name></person-group> (<year>2023</year>). <article-title>Reflections on cross-modal correspondences: current understanding and issues for future research</article-title>. <source>Multisens. Res.</source> <volume>37</volume>, <fpage>1</fpage>&#x2013;<lpage>23</lpage>. doi: <pub-id pub-id-type="doi">10.1163/22134808-bja10114</pub-id>, PMID: <pub-id pub-id-type="pmid">37963487</pub-id></mixed-citation></ref>
<ref id="ref76"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Nadon</surname> <given-names>&#x00C9;.</given-names></name> <name><surname>Tillmann</surname> <given-names>B.</given-names></name> <name><surname>Saj</surname> <given-names>A.</given-names></name> <name><surname>Gosselin</surname> <given-names>N.</given-names></name></person-group> (<year>2021</year>). <article-title>The emotional effect of background music on selective attention of adults</article-title>. <source>Front. Psychol.</source> <volume>12</volume>:<fpage>729037</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2021.729037</pub-id>, PMID: <pub-id pub-id-type="pmid">34671300</pub-id></mixed-citation></ref>
<ref id="ref77"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Nemati</surname> <given-names>S.</given-names></name> <name><surname>Akrami</surname> <given-names>H.</given-names></name> <name><surname>Salehi</surname> <given-names>S.</given-names></name> <name><surname>Esteky</surname> <given-names>H.</given-names></name> <name><surname>Moghimi</surname> <given-names>S.</given-names></name></person-group> (<year>2019</year>). <article-title>Lost in music: neural signature of pleasure and its role in modulating attentional resources</article-title>. <source>Brain Res.</source> <volume>1711</volume>, <fpage>7</fpage>&#x2013;<lpage>15</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.brainres.2019.01.011</pub-id>, PMID: <pub-id pub-id-type="pmid">30629944</pub-id></mixed-citation></ref>
<ref id="ref78"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Nibbe</surname> <given-names>N.</given-names></name> <name><surname>Orth</surname> <given-names>U. R.</given-names></name></person-group> (<year>2017</year>). &#x201C;<article-title>Odor in marketing</article-title>&#x201D; in <source>Springer Handbook of Odor</source>. ed.  A. Buettner (<publisher-name>Springer International Publishing</publisher-name>), <fpage>141</fpage>&#x2013;<lpage>142</lpage>. doi: <pub-id pub-id-type="doi">10.1007/978-3-319-26932-0_56</pub-id></mixed-citation></ref>
<ref id="ref79"><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Obrist</surname> <given-names>M.</given-names></name> <name><surname>Boyle</surname> <given-names>G.</given-names></name> <name><surname>van Brakel</surname> <given-names>M.</given-names></name> <name><surname>Duerinck</surname> <given-names>F.</given-names></name></person-group> (<year>2017</year>). &#x201C;Multisensory experiences and spaces.&#x201D; in <italic>Proceedings of the 2017 ACM international conference on interactive surfaces and spaces</italic>, pp. 469&#x2013;472.</mixed-citation></ref>
<ref id="ref80"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Panahi</surname> <given-names>O.</given-names></name></person-group> (<year>2025</year>). <article-title>Wearable sensors and personalized sustainability: monitoring health and environmental exposures in real-time</article-title>. <source>Europ. J. Innov. Stud. Sustain.</source> <volume>1</volume>, <fpage>11</fpage>&#x2013;<lpage>19</lpage>. doi: <pub-id pub-id-type="doi">10.59324/ejiss.2025.1(2).02</pub-id></mixed-citation></ref>
<ref id="ref81"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Paniagua-G&#x00F3;mez</surname> <given-names>M.</given-names></name> <name><surname>Fernandez-Carmona</surname> <given-names>M.</given-names></name></person-group> (<year>2025</year>). <article-title>Trends and challenges in real-time stress detection and modulation: the role of the IoT and artificial intelligence</article-title>. <source>Electronics</source> <volume>14</volume>:<fpage>2581</fpage>. doi: <pub-id pub-id-type="doi">10.3390/electronics14132581</pub-id></mixed-citation></ref>
<ref id="ref82"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pardini</surname> <given-names>S.</given-names></name> <name><surname>Gabrielli</surname> <given-names>S.</given-names></name> <name><surname>Dianti</surname> <given-names>M.</given-names></name> <name><surname>Novara</surname> <given-names>C.</given-names></name> <name><surname>Zucco</surname> <given-names>G.</given-names></name> <name><surname>Mich</surname> <given-names>O.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>The role of personalization in the user experience, preferences and engagement with virtual reality environments for relaxation</article-title>. <source>Int. J. Environ. Res. Public Health</source> <volume>19</volume>:<fpage>7237</fpage>. doi: <pub-id pub-id-type="doi">10.3390/ijerph19127237</pub-id>, PMID: <pub-id pub-id-type="pmid">35742483</pub-id></mixed-citation></ref>
<ref id="ref83"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Parker</surname> <given-names>M.</given-names></name> <name><surname>Spennemann</surname> <given-names>D. H. R.</given-names></name> <name><surname>Bond</surname> <given-names>J.</given-names></name></person-group> (<year>2024</year>). <article-title>Sensory and multisensory perception&#x2014;perspectives toward defining multisensory experience and heritage</article-title>. <source>J. Sens. Stud.</source> <volume>39</volume>:<fpage>12940</fpage>. doi: <pub-id pub-id-type="doi">10.1111/joss.12940</pub-id></mixed-citation></ref>
<ref id="ref84"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pelagatti</surname> <given-names>C.</given-names></name> <name><surname>Blini</surname> <given-names>E.</given-names></name> <name><surname>Vannucci</surname> <given-names>M.</given-names></name></person-group> (<year>2025</year>). <article-title>Catching mind wandering with pupillometry: conceptual and methodological challenges</article-title>. <source>WIREs Cogn. Sci.</source> <volume>16</volume>:<fpage>1695</fpage>. doi: <pub-id pub-id-type="doi">10.1002/wcs.1695</pub-id>, PMID: <pub-id pub-id-type="pmid">39435485</pub-id></mixed-citation></ref>
<ref id="ref85"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pergantis</surname> <given-names>P.</given-names></name> <name><surname>Bamicha</surname> <given-names>V.</given-names></name> <name><surname>Skianis</surname> <given-names>C.</given-names></name> <name><surname>Drigas</surname> <given-names>A.</given-names></name></person-group> (<year>2025</year>). <article-title>AI Chatbots and cognitive control: enhancing executive functions through Chatbot interactions: a systematic review</article-title>. <source>Brain Sci.</source> <volume>15</volume>:<fpage>47</fpage>. doi: <pub-id pub-id-type="doi">10.3390/brainsci15010047</pub-id>, PMID: <pub-id pub-id-type="pmid">39851415</pub-id></mixed-citation></ref>
<ref id="ref86"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Perham</surname> <given-names>N.</given-names></name> <name><surname>Sykora</surname> <given-names>M.</given-names></name></person-group> (<year>2012</year>). <article-title>Disliked music can be better for performance than liked music</article-title>. <source>Appl. Cogn. Psychol.</source> <volume>26</volume>, <fpage>550</fpage>&#x2013;<lpage>555</lpage>. doi: <pub-id pub-id-type="doi">10.1002/acp.2826</pub-id></mixed-citation></ref>
<ref id="ref87"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pizzoli</surname> <given-names>S. F. M.</given-names></name> <name><surname>Monzani</surname> <given-names>D.</given-names></name> <name><surname>Mazzocco</surname> <given-names>K.</given-names></name> <name><surname>Maggioni</surname> <given-names>E.</given-names></name> <name><surname>Pravettoni</surname> <given-names>G.</given-names></name></person-group> (<year>2022</year>). <article-title>The power of odor persuasion: the incorporation of olfactory cues in virtual environments for personalized relaxation</article-title>. <source>Perspect. Psychol. Sci.</source> <volume>17</volume>, <fpage>652</fpage>&#x2013;<lpage>661</lpage>. doi: <pub-id pub-id-type="doi">10.1177/17456916211014196</pub-id>, PMID: <pub-id pub-id-type="pmid">34752166</pub-id></mixed-citation></ref>
<ref id="ref88"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Plailly</surname> <given-names>J.</given-names></name> <name><surname>Tillmann</surname> <given-names>B.</given-names></name> <name><surname>Royet</surname> <given-names>J. P.</given-names></name></person-group> (<year>2007</year>). <article-title>The feeling of familiarity of music and odors: the same neural signature?</article-title> <source>Cereb. Cortex</source> <volume>17</volume>, <fpage>2650</fpage>&#x2013;<lpage>2658</lpage>. doi: <pub-id pub-id-type="doi">10.1093/cercor/bhl173</pub-id>, PMID: <pub-id pub-id-type="pmid">17289777</pub-id></mixed-citation></ref>
<ref id="ref89"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Poell</surname> <given-names>T.</given-names></name> <name><surname>Nieborg</surname> <given-names>D.</given-names></name> <name><surname>van Dijck</surname> <given-names>J.</given-names></name></person-group> (<year>2019</year>). <article-title>Platformisation</article-title>. <source>Internet Policy Rev.</source> <volume>8</volume>:<fpage>1425</fpage>. doi: <pub-id pub-id-type="doi">10.14763/2019.4.1425</pub-id></mixed-citation></ref>
<ref id="ref90"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Poerio</surname> <given-names>G. L.</given-names></name> <name><surname>Sormaz</surname> <given-names>M.</given-names></name> <name><surname>Wang</surname> <given-names>H.-T.</given-names></name> <name><surname>Margulies</surname> <given-names>D.</given-names></name> <name><surname>Jefferies</surname> <given-names>E.</given-names></name> <name><surname>Smallwood</surname> <given-names>J.</given-names></name></person-group> (<year>2017</year>). <article-title>The role of the default mode network in component processes underlying the wandering mind</article-title>. <source>Soc. Cogn. Affect. Neurosci.</source> <volume>12</volume>, <fpage>1047</fpage>&#x2013;<lpage>1062</lpage>. doi: <pub-id pub-id-type="doi">10.1093/scan/nsx041</pub-id>, PMID: <pub-id pub-id-type="pmid">28402561</pub-id></mixed-citation></ref>
<ref id="ref91"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pursey</surname> <given-names>T.</given-names></name> <name><surname>Lomas</surname> <given-names>D.</given-names></name></person-group> (<year>2018</year>). <article-title>Tate sensorium: an experiment in multisensory immersive design</article-title>. <source>Senses Soc.</source> <volume>13</volume>, <fpage>354</fpage>&#x2013;<lpage>366</lpage>. doi: <pub-id pub-id-type="doi">10.1080/17458927.2018.1516026</pub-id></mixed-citation></ref>
<ref id="ref92"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Rafieian</surname> <given-names>O.</given-names></name> <name><surname>Yoganarasimhan</surname> <given-names>H.</given-names></name></person-group> (<year>2023</year>). &#x201C;<article-title>AI and personalization</article-title>&#x201D; in <source>Artificial intelligence in marketing</source>. eds. <person-group person-group-type="editor"><name><surname>Sudhir</surname> <given-names>K.</given-names></name> <name><surname>Toubia</surname> <given-names>O.</given-names></name></person-group>, (<publisher-name>Leeds, United Kingdom: Emerald Publishing Limited</publisher-name>), <volume>20</volume>:<fpage>77</fpage>&#x2013;<lpage>102</lpage>.</mixed-citation></ref>
<ref id="ref93"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ramachandran</surname> <given-names>V. S.</given-names></name> <name><surname>Hubbard</surname> <given-names>E. M.</given-names></name></person-group> (<year>2001</year>). <article-title>Synaesthesia - a window into perception, thought and language</article-title>. <source>J. Conscious. Stud.</source> <volume>8</volume>, <fpage>3</fpage>&#x2013;<lpage>34</lpage>.</mixed-citation></ref>
<ref id="ref94"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Reid</surname> <given-names>T.</given-names></name> <name><surname>Nielson</surname> <given-names>C.</given-names></name> <name><surname>Wormwood</surname> <given-names>J. B.</given-names></name></person-group> (<year>2025</year>). <article-title>Measuring arousal: promises and pitfalls</article-title>. <source>Affect. Sci.</source> <volume>6</volume>, <fpage>369</fpage>&#x2013;<lpage>379</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s42761-024-00288-4</pub-id>, PMID: <pub-id pub-id-type="pmid">40605947</pub-id></mixed-citation></ref>
<ref id="ref9002"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Rekow</surname> <given-names>D.</given-names></name> <name><surname>Leleu</surname> <given-names>A.</given-names></name></person-group> (<year>2023</year>). &#x201C;<article-title>Tips from the Nose: Odor-Driven Visual Categorization in the Developing Human Brain</article-title>.&#x201D; in <source>Chemical Signals in Vertebrates 15. CSiV 2021</source>. eds. <person-group person-group-type="editor"><name><surname>Schaal</surname> <given-names>B.</given-names></name> <name><surname>Rekow</surname> <given-names>D.</given-names></name> <name><surname>Keller</surname> <given-names>M.</given-names></name> <name><surname>Damon</surname> <given-names>F.</given-names></name></person-group>. <publisher-name>Springer</publisher-name>, <publisher-loc>Cham</publisher-loc>. doi: <pub-id pub-id-type="doi">10.1007/978-3-031-35159-4_18</pub-id></mixed-citation></ref>
<ref id="ref95"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Rentfrow</surname> <given-names>P. J.</given-names></name></person-group> (<year>2012</year>). <article-title>The role of music in everyday life: current directions in the social psychology of music</article-title>. <source>Soc. Personal. Psychol. Compass</source> <volume>6</volume>, <fpage>402</fpage>&#x2013;<lpage>416</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1751-9004.2012.00434.x</pub-id></mixed-citation></ref>
<ref id="ref96"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Rey</surname> <given-names>L.</given-names></name> <name><surname>D&#x00E9;soche</surname> <given-names>C.</given-names></name> <name><surname>Th&#x00E9;venet</surname> <given-names>M.</given-names></name> <name><surname>Garcia</surname> <given-names>S.</given-names></name> <name><surname>Tillmann</surname> <given-names>B.</given-names></name> <name><surname>Plailly</surname> <given-names>J.</given-names></name></person-group> (<year>2023</year>). &#x201C;<article-title>Characterizing Emotional Response to Olfactory, Auditory, and Visual Stimulations in a Virtual Reality Environment</article-title>&#x201D; in <source>Basic Protocols on Emotions, Senses, and Foods</source>. ed.  M. Bensafi (<publisher-name>Springer US</publisher-name>), <fpage>159</fpage>&#x2013;<lpage>174</lpage>. doi: <pub-id pub-id-type="doi">10.1007/978-1-0716-2934-5_13</pub-id></mixed-citation></ref>
<ref id="ref97"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Sakamoto</surname> <given-names>M.</given-names></name> <name><surname>Ando</surname> <given-names>H.</given-names></name> <name><surname>Tsutou</surname> <given-names>A.</given-names></name></person-group> (<year>2013</year>). <article-title>Comparing the effects of different individualized music interventions for elderly individuals with severe dementia</article-title>. <source>Int. Psychogeriatr.</source> <volume>25</volume>, <fpage>775</fpage>&#x2013;<lpage>784</lpage>. doi: <pub-id pub-id-type="doi">10.1017/S1041610212002256</pub-id>, PMID: <pub-id pub-id-type="pmid">23298693</pub-id></mixed-citation></ref>
<ref id="ref98"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Sathian</surname> <given-names>K.</given-names></name> <name><surname>Ramachandran</surname> <given-names>V. S.</given-names></name></person-group> (<year>2020</year>). <source>Multisensory perception: From laboratory to clinic</source>. <publisher-name>San Diego: Elsevier</publisher-name>. doi: <pub-id pub-id-type="doi">10.1016/C2016-0-03465-4</pub-id></mixed-citation></ref>
<ref id="ref9003"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Schaal</surname> <given-names>B.</given-names></name> <name><surname>Saxton</surname> <given-names>T. K.</given-names></name> <name><surname>Loos</surname> <given-names>H.</given-names></name> <name><surname>Soussignan</surname> <given-names>R.</given-names></name> <name><surname>Durand</surname> <given-names>K.</given-names></name></person-group> (<year>2020</year>). <article-title>Olfaction scaffolds the developing human from neonate to adolescent and beyond</article-title>. <source>Phil. Trans. R. Soc. B</source> <volume>375</volume>:<fpage>20190261</fpage>. doi: <pub-id pub-id-type="doi">10.1098/rstb.2019.0261</pub-id></mixed-citation></ref>
<ref id="ref99"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Schifferstein</surname> <given-names>H. N. J.</given-names></name></person-group> (<year>2006</year>). <article-title>The perceived importance of sensory modalities in product usage: a study of self-reports</article-title>. <source>Acta Psychol.</source> <volume>121</volume>, <fpage>41</fpage>&#x2013;<lpage>64</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.actpsy.2005.06.004</pub-id>, PMID: <pub-id pub-id-type="pmid">16098945</pub-id></mixed-citation></ref>
<ref id="ref100"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Schwambergov&#x00E1;</surname> <given-names>D.</given-names></name> <name><surname>T&#x0159;ebick&#x00E1; Fialov&#x00E1;</surname> <given-names>J.</given-names></name> <name><surname>Havl&#x00ED;&#x010D;ek</surname> <given-names>J.</given-names></name></person-group> (<year>2024</year>). <article-title>Olfactory self-inspection: own body odour provides cues to one&#x2019;s health and hygiene status</article-title>. <source>Physiol. Behav.</source> <volume>275</volume>:<fpage>114449</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.physbeh.2023.114449</pub-id>, PMID: <pub-id pub-id-type="pmid">38135110</pub-id></mixed-citation></ref>
<ref id="ref101"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Seo</surname> <given-names>H. S.</given-names></name> <name><surname>Hummel</surname> <given-names>T.</given-names></name></person-group> (<year>2011</year>). <article-title>Auditory-olfactory integration: congruent or pleasant sounds amplify odor pleasantness</article-title>. <source>Chem. Senses</source> <volume>36</volume>, <fpage>301</fpage>&#x2013;<lpage>309</lpage>. doi: <pub-id pub-id-type="doi">10.1093/chemse/bjq129</pub-id>, PMID: <pub-id pub-id-type="pmid">21163913</pub-id></mixed-citation></ref>
<ref id="ref102"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Seo</surname> <given-names>H.-S.</given-names></name> <name><surname>Lohse</surname> <given-names>F.</given-names></name> <name><surname>Luckett</surname> <given-names>C. R.</given-names></name> <name><surname>Hummel</surname> <given-names>T.</given-names></name></person-group> (<year>2014</year>). <article-title>Congruent sound can modulate odor pleasantness</article-title>. <source>Chem. Senses</source> <volume>39</volume>, <fpage>215</fpage>&#x2013;<lpage>228</lpage>. doi: <pub-id pub-id-type="doi">10.1093/chemse/bjt070</pub-id>, PMID: <pub-id pub-id-type="pmid">24368256</pub-id></mixed-citation></ref>
<ref id="ref103"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Shallice</surname> <given-names>T.</given-names></name></person-group> (<year>1988</year>). <source>From neuropsychology to mental structure</source>. (<publisher-name>Cambridge: Cambridge University Press</publisher-name>).</mixed-citation></ref>
<ref id="ref104"><mixed-citation publication-type="other"><person-group person-group-type="author"><name><surname>Sinha</surname> <given-names>A.</given-names></name> <name><surname>Sharma</surname> <given-names>V.</given-names></name> <name><surname>Gupta</surname> <given-names>M.</given-names></name> <name><surname>Rao</surname> <given-names>G. M.</given-names></name> <name><surname>Raj</surname> <given-names>A.</given-names></name> <name><surname>Kumari</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2023</year>). &#x201C;Smell technology: advancements and prospects in digital scent technology and fragrance algorithms.&#x201D; in <italic>2023 14th international conference on computing communication and networking technologies (ICCCNT)</italic>, pp. 1&#x2013;7.</mixed-citation></ref>
<ref id="ref105"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Smallwood</surname> <given-names>J.</given-names></name> <name><surname>Schooler</surname> <given-names>J. W.</given-names></name></person-group> (<year>2015</year>). <article-title>The science of mind wandering: empirically navigating the stream of consciousness</article-title>. <source>Annu. Rev. Psychol.</source> <volume>66</volume>, <fpage>487</fpage>&#x2013;<lpage>518</lpage>. doi: <pub-id pub-id-type="doi">10.1146/annurev-psych-010814-015331</pub-id>, PMID: <pub-id pub-id-type="pmid">25293689</pub-id></mixed-citation></ref>
<ref id="ref106"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Smith</surname> <given-names>V.</given-names></name> <name><surname>Mitchell</surname> <given-names>D. J.</given-names></name> <name><surname>Duncan</surname> <given-names>J.</given-names></name></person-group> (<year>2018</year>). <article-title>Role of the default mode network in cognitive transitions</article-title>. <source>Cereb. Cortex</source> <volume>28</volume>, <fpage>3685</fpage>&#x2013;<lpage>3696</lpage>. doi: <pub-id pub-id-type="doi">10.1093/cercor/bhy167</pub-id>, PMID: <pub-id pub-id-type="pmid">30060098</pub-id></mixed-citation></ref>
<ref id="ref107"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Solc&#x00E0;</surname> <given-names>M.</given-names></name> <name><surname>Krishna</surname> <given-names>V.</given-names></name> <name><surname>Young</surname> <given-names>N.</given-names></name> <name><surname>Deogaonkar</surname> <given-names>M.</given-names></name> <name><surname>Herbelin</surname> <given-names>B.</given-names></name> <name><surname>Orepic</surname> <given-names>P.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Enhancing analgesic spinal cord stimulation for chronic pain with personalized immersive virtual reality</article-title>. <source>Pain</source> <volume>162</volume>, <fpage>1641</fpage>&#x2013;<lpage>1649</lpage>. doi: <pub-id pub-id-type="doi">10.1097/j.pain.0000000000002160</pub-id>, PMID: <pub-id pub-id-type="pmid">33259460</pub-id></mixed-citation></ref>
<ref id="ref108"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2015</year>). <article-title>Leading the consumer by the nose: on the commercialization of olfactory design for the food and beverage sector</article-title>. <source>Flavour</source> <volume>4</volume>:<fpage>31</fpage>. doi: <pub-id pub-id-type="doi">10.1186/s13411-015-0041-1</pub-id></mixed-citation></ref>
<ref id="ref109"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2020a</year>). <article-title>Olfactory-colour crossmodal correspondences in art, science, and design</article-title>. <source>Cogn. Res. Princ. Implic.</source> <volume>5</volume>:<fpage>52</fpage>. doi: <pub-id pub-id-type="doi">10.1186/s41235-020-00246-1</pub-id>, PMID: <pub-id pub-id-type="pmid">33113051</pub-id></mixed-citation></ref>
<ref id="ref110"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2020b</year>). <article-title>Scent and the cinema</article-title>. <source>I-Perception</source> <volume>11</volume>:<fpage>204166952096971</fpage>. doi: <pub-id pub-id-type="doi">10.1177/2041669520969710</pub-id></mixed-citation></ref>
<ref id="ref111"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2020c</year>). <article-title>Senses of place: architectural design for the multisensory mind</article-title>. <source>Cogn. Res. Princ. Implic.</source> <volume>5</volume>:<fpage>46</fpage>. doi: <pub-id pub-id-type="doi">10.1186/s41235-020-00243-4</pub-id>, PMID: <pub-id pub-id-type="pmid">32945978</pub-id></mixed-citation></ref>
<ref id="ref9001"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2020d</year>). <article-title>Using ambient scent to enhance well-being in the multisensory built environment</article-title>.  <source>Front. Psychol.</source> <volume>11</volume>:<fpage>598859</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2020.598859</pub-id></mixed-citation></ref>
<ref id="ref112"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2021a</year>). <article-title>Musical scents: on the surprising absence of scented musical/auditory events, entertainments, and experiences</article-title>. <source>I-Perception</source> <volume>12</volume>:<fpage>204166952110387</fpage>. doi: <pub-id pub-id-type="doi">10.1177/20416695211038747</pub-id>, PMID: <pub-id pub-id-type="pmid">34589196</pub-id></mixed-citation></ref>
<ref id="ref113"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2021b</year>). <article-title>Scent in the context of live performance</article-title>. <source>I-Perception</source> <volume>12</volume>:<fpage>204166952098553</fpage>. doi: <pub-id pub-id-type="doi">10.1177/2041669520985537</pub-id>, PMID: <pub-id pub-id-type="pmid">33613954</pub-id></mixed-citation></ref>
<ref id="ref114"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2022</year>). <article-title>Sensehacking the guest&#x2019;s multisensory hotel experience</article-title>. <source>Front. Psychol.</source> <volume>13</volume>:<fpage>1014818</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2022.1014818</pub-id>, PMID: <pub-id pub-id-type="pmid">36600704</pub-id></mixed-citation></ref>
<ref id="ref115"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spence</surname> <given-names>C.</given-names></name> <name><surname>Levitan</surname> <given-names>C. A.</given-names></name> <name><surname>Shankar</surname> <given-names>M. U.</given-names></name> <name><surname>Zampini</surname> <given-names>M.</given-names></name></person-group> (<year>2010</year>). <article-title>Does food color influence taste and flavor perception in humans?</article-title> <source>Chemosens. Percept.</source> <volume>3</volume>, <fpage>68</fpage>&#x2013;<lpage>84</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s12078-010-9067-z</pub-id></mixed-citation></ref>
<ref id="ref116"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Spreng</surname> <given-names>R. N.</given-names></name> <name><surname>Mar</surname> <given-names>R. A.</given-names></name> <name><surname>Kim</surname> <given-names>A. S. N.</given-names></name></person-group> (<year>2009</year>). <article-title>The common neural basis of autobiographical memory, prospection, navigation, theory of mind, and the default mode: a quantitative Meta-analysis</article-title>. <source>J. Cogn. Neurosci.</source> <volume>21</volume>, <fpage>489</fpage>&#x2013;<lpage>510</lpage>. doi: <pub-id pub-id-type="doi">10.1162/jocn.2008.21029</pub-id>, PMID: <pub-id pub-id-type="pmid">18510452</pub-id></mixed-citation></ref>
<ref id="ref117"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Taruffi</surname> <given-names>L.</given-names></name> <name><surname>Pehrs</surname> <given-names>C.</given-names></name> <name><surname>Skouras</surname> <given-names>S.</given-names></name> <name><surname>Koelsch</surname> <given-names>S.</given-names></name></person-group> (<year>2017</year>). <article-title>Effects of sad and happy music on mind-wandering and the default mode network</article-title>. <source>Sci. Rep.</source> <volume>7</volume>:<fpage>14396</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41598-017-14849-0</pub-id>, PMID: <pub-id pub-id-type="pmid">29089542</pub-id></mixed-citation></ref>
<ref id="ref118"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Teller</surname> <given-names>C.</given-names></name> <name><surname>Dennis</surname> <given-names>C.</given-names></name></person-group> (<year>2012</year>). <article-title>The effect of ambient scent on consumers&#x2019; perception, emotions and behaviour: a critical review</article-title>. <source>J. Mark. Manag.</source> <volume>28</volume>, <fpage>14</fpage>&#x2013;<lpage>36</lpage>. doi: <pub-id pub-id-type="doi">10.1080/0267257X.2011.560719</pub-id></mixed-citation></ref>
<ref id="ref119"><mixed-citation publication-type="other"><person-group person-group-type="author"><collab id="coll1">The International Federation of the Phonographic Industry</collab></person-group>. (<year>2023</year>). Engaging with music 2023. Available online at: <ext-link xlink:href="https://www.ifpi.org/wp-content/uploads/2023/12/IFPI-Engaging-With-Music-2023_full-report.pdf" ext-link-type="uri">https://www.ifpi.org/wp-content/uploads/2023/12/IFPI-Engaging-With-Music-2023_full-report.pdf</ext-link> (Accessed June 19, 2025).</mixed-citation></ref>
<ref id="ref120"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Thompson</surname> <given-names>W. F.</given-names></name> <name><surname>Glenn Schellenberg</surname> <given-names>E.</given-names></name> <name><surname>Husain</surname> <given-names>G.</given-names></name></person-group> (<year>2001</year>). <article-title>Arousal, mood and the Mozart effect</article-title>. <source>Psychol. Sci.</source> <volume>12</volume>, <fpage>248</fpage>&#x2013;<lpage>251</lpage>. doi: <pub-id pub-id-type="doi">10.1111/1467-9280.00345</pub-id>, PMID: <pub-id pub-id-type="pmid">11437309</pub-id></mixed-citation></ref>
<ref id="ref121"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Toet</surname> <given-names>A.</given-names></name> <name><surname>Eijsman</surname> <given-names>S.</given-names></name> <name><surname>Liu</surname> <given-names>Y.</given-names></name> <name><surname>Donker</surname> <given-names>S.</given-names></name> <name><surname>Kaneko</surname> <given-names>D.</given-names></name> <name><surname>Brouwer</surname> <given-names>A. M.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>The relation between valence and arousal in subjective odor experience</article-title>. <source>Chemosens. Percept.</source> <volume>13</volume>, <fpage>141</fpage>&#x2013;<lpage>151</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s12078-019-09275-7</pub-id></mixed-citation></ref>
<ref id="ref122"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Toffolo</surname> <given-names>M. B. J.</given-names></name> <name><surname>Smeets</surname> <given-names>M. A. M.</given-names></name> <name><surname>van den Hout</surname> <given-names>M. A.</given-names></name></person-group> (<year>2012</year>). <article-title>Proust revisited: Odours as triggers of aversive memories</article-title>. <source>Cognit. Emot.</source> <volume>26</volume>, <fpage>83</fpage>&#x2013;<lpage>92</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699931.2011.555475</pub-id>, PMID: <pub-id pub-id-type="pmid">21547759</pub-id></mixed-citation></ref>
<ref id="ref123"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>van Dijck</surname> <given-names>J.</given-names></name> <name><surname>Nieborg</surname> <given-names>D.</given-names></name> <name><surname>Poell</surname> <given-names>T.</given-names></name></person-group> (<year>2019</year>). <article-title>Reframing platform power</article-title>. <source>Internet Policy Rev.</source> <volume>8</volume>:<fpage>1414</fpage>. doi: <pub-id pub-id-type="doi">10.14763/2019.2.1414</pub-id></mixed-citation></ref>
<ref id="ref124"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Velasco</surname> <given-names>C.</given-names></name> <name><surname>Balboa</surname> <given-names>D.</given-names></name> <name><surname>Marmolejo-Ramos</surname> <given-names>F.</given-names></name> <name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2014</year>). <article-title>Crossmodal effect of music and odor pleasantness on olfactory quality perception</article-title>. <source>Front. Psychol.</source> <volume>5</volume>:<fpage>1352</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2014.01352</pub-id>, PMID: <pub-id pub-id-type="pmid">25506332</pub-id></mixed-citation></ref>
<ref id="ref125"><mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Velasco</surname> <given-names>C.</given-names></name> <name><surname>Obrist</surname> <given-names>M.</given-names></name></person-group> (<year>2020</year>). <source>Multisensory experiences</source>: <publisher-name>Oxford University Press</publisher-name>.</mixed-citation></ref>
<ref id="ref126"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Vi</surname> <given-names>C. T.</given-names></name> <name><surname>Ablart</surname> <given-names>D.</given-names></name> <name><surname>Gatti</surname> <given-names>E.</given-names></name> <name><surname>Velasco</surname> <given-names>C.</given-names></name> <name><surname>Obrist</surname> <given-names>M.</given-names></name></person-group> (<year>2017</year>). <article-title>Not just seeing, but also feeling art: mid-air haptic experiences integrated in a multisensory art exhibition</article-title>. <source>Int. J. Hum.-Comput. Stud.</source> <volume>108</volume>, <fpage>1</fpage>&#x2013;<lpage>14</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.ijhcs.2017.06.004</pub-id></mixed-citation></ref>
<ref id="ref127"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Villemure</surname> <given-names>C.</given-names></name> <name><surname>Slotnick</surname> <given-names>B. M.</given-names></name> <name><surname>Bushnell</surname> <given-names>C. M.</given-names></name></person-group> (<year>2003</year>). <article-title>Effects of odors on pain perception: deciphering the roles of emotion and attention</article-title>. <source>Pain</source> <volume>106</volume>, <fpage>101</fpage>&#x2013;<lpage>108</lpage>. doi: <pub-id pub-id-type="doi">10.1016/S0304-3959(03)00297-5</pub-id>, PMID: <pub-id pub-id-type="pmid">14581116</pub-id></mixed-citation></ref>
<ref id="ref128"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Webster</surname> <given-names>J.</given-names></name></person-group> (<year>2023</year>). <article-title>The promise of personalisation: exploring how music streaming platforms are shaping the performance of class identities and distinction</article-title>. <source>New Media Soc.</source> <volume>25</volume>, <fpage>2140</fpage>&#x2013;<lpage>2162</lpage>. doi: <pub-id pub-id-type="doi">10.1177/14614448211027863</pub-id></mixed-citation></ref>
<ref id="ref129"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Willander</surname> <given-names>J.</given-names></name> <name><surname>Larsson</surname> <given-names>M.</given-names></name></person-group> (<year>2006</year>). <article-title>Smell your way back to childhood: autobiographical odor memory</article-title>. <source>Psychon. Bull. Rev.</source> <volume>13</volume>, <fpage>240</fpage>&#x2013;<lpage>244</lpage>. doi: <pub-id pub-id-type="doi">10.3758/BF03193837</pub-id>, PMID: <pub-id pub-id-type="pmid">16892988</pub-id></mixed-citation></ref>
<ref id="ref130"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Willander</surname> <given-names>J.</given-names></name> <name><surname>Larsson</surname> <given-names>M.</given-names></name></person-group> (<year>2007</year>). <article-title>Olfaction and emotion: the case of autobiographical memory</article-title>. <source>Mem. Cogn.</source> <volume>35</volume>, <fpage>1659</fpage>&#x2013;<lpage>1663</lpage>. doi: <pub-id pub-id-type="doi">10.3758/BF03193499</pub-id>, PMID: <pub-id pub-id-type="pmid">18062543</pub-id></mixed-citation></ref>
<ref id="ref131"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Wrzesniewski</surname> <given-names>A.</given-names></name></person-group> (<year>1999</year>). <article-title>Odor and affect: individual differences in the impact of odor on liking for places, things and people</article-title>. <source>Chem. Senses</source> <volume>24</volume>, <fpage>713</fpage>&#x2013;<lpage>721</lpage>. doi: <pub-id pub-id-type="doi">10.1093/chemse/24.6.713</pub-id>, PMID: <pub-id pub-id-type="pmid">10587506</pub-id></mixed-citation></ref>
<ref id="ref132"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Yang</surname> <given-names>Y.</given-names></name> <name><surname>Cai</surname> <given-names>P.</given-names></name></person-group> (<year>2024</year>). <article-title>Multisensory experience design: a literature review</article-title>. <source>Commun. Human. Res.</source> <volume>27</volume>, <fpage>34</fpage>&#x2013;<lpage>37</lpage>. doi: <pub-id pub-id-type="doi">10.54254/2753-7064/27/20232117</pub-id></mixed-citation></ref>
<ref id="ref133"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Zhou</surname> <given-names>C.</given-names></name> <name><surname>Yamanaka</surname> <given-names>T.</given-names></name></person-group> (<year>2018</year>). <article-title>How does congruence of scent and music affect people&#x2019;s emotions?</article-title> <source>Int. J. Affect. Eng.</source> <volume>17</volume>:<fpage>IJAE-D-17-00032</fpage>. doi: <pub-id pub-id-type="doi">10.5057/ijae.IJAE-D-17-00032</pub-id></mixed-citation></ref>
<ref id="ref134"><mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Zhuk</surname> <given-names>A.</given-names></name></person-group> (<year>2023</year>). <article-title>Artificial intelligence impact on the environment: hidden ecological costs and ethical-legal issues</article-title>. <source>J. Digit. Technol. Law</source> <volume>1</volume>, <fpage>932</fpage>&#x2013;<lpage>954</lpage>. doi: <pub-id pub-id-type="doi">10.21202/jdtl.2023.40</pub-id></mixed-citation></ref>
</ref-list><fn-group><fn id="fn0001" fn-type="custom" custom-type="edited-by"><p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/182357/overview">Sebastien Fiorucci</ext-link>, Universit&#x00E9; C&#x00F4;te d'Azur, France</p></fn>
<fn id="fn0002" fn-type="custom" custom-type="reviewed-by"><p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/181267/overview">Filippo Cieri</ext-link>, Cleveland Clinic, United States; <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2802470/overview">Pantelis Pergantis</ext-link>, University of the Aegean, Greece</p></fn></fn-group></back>
</article>