<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article article-type="research-article" dtd-version="2.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Virtual Real.</journal-id>
<journal-title>Frontiers in Virtual Reality</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Virtual Real.</abbrev-journal-title>
<issn pub-type="epub">2673-4192</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1056782</article-id>
<article-id pub-id-type="doi">10.3389/frvir.2022.1056782</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Virtual Reality</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Examining cross-modal correspondence between ambient color and taste perception in virtual reality</article-title>
<alt-title alt-title-type="left-running-head">Wu et al.</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/frvir.2022.1056782">10.3389/frvir.2022.1056782</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>Wu</surname>
<given-names>Zhen</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="fn" rid="fn1">
<sup>&#x2020;</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2032047/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Shi</surname>
<given-names>Rongkai</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="fn" rid="fn1">
<sup>&#x2020;</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2030597/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Li</surname>
<given-names>Ziming</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2113733/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Jiang</surname>
<given-names>Mengqi</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="fn" rid="fn1">
<sup>&#x2020;</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2113229/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Li</surname>
<given-names>Yue</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1597572/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Yu</surname>
<given-names>Lingyun</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2113870/overview"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Liang</surname>
<given-names>Hai-Ning</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="https://loop.frontiersin.org/people/582502/overview"/>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>Division of Integrative Systems and Design</institution>, <institution>Hong Kong University of Science and Technology</institution>, <addr-line>Hong Kong</addr-line>, <country>Hong Kong SAR, China</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Department of Computing</institution>, <institution>Xi&#x2019;an Jiaotong-Liverpool University</institution>, <addr-line>Suzhou</addr-line>, <country>China</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>School of System Design and Intelligent Manufacturing</institution>, <institution>Southern University of Science and Technology</institution>, <addr-line>Shenzhen</addr-line>, <country>China</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>
<bold>Edited by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1757359/overview">Eike Langbehn</ext-link>, Hamburg University of Applied Sciences, Germany</p>
</fn>
<fn fn-type="edited-by">
<p>
<bold>Reviewed by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2053834/overview">Sandra Malpica</ext-link>, University of Zaragoza, Spain</p>
<p>
<ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/194626/overview">Andy Thomas Woods</ext-link>, University of London, United Kingdom</p>
</fn>
<corresp id="c001">&#x2a;Correspondence: Hai-Ning Liang, <email>haining.liang@xjtlu.edu.cn</email>
</corresp>
<fn fn-type="equal" id="fn1">
<label>
<sup>&#x2020;</sup>
</label>
<p>These authors have contributed equally to this work</p>
</fn>
<fn fn-type="other">
<p>This article was submitted to Technologies for VR, a section of the journal Frontiers in Virtual Reality</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>02</day>
<month>12</month>
<year>2022</year>
</pub-date>
<pub-date pub-type="collection">
<year>2022</year>
</pub-date>
<volume>3</volume>
<elocation-id>1056782</elocation-id>
<history>
<date date-type="received">
<day>29</day>
<month>09</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>22</day>
<month>11</month>
<year>2022</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2022 Wu, Shi, Li, Jiang, Li, Yu and Liang.</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Wu, Shi, Li, Jiang, Li, Yu and Liang</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>This research explores the cross-modal correspondence effect of ambient color on people&#x2019;s taste perception in virtual reality (VR). To this end, we designed and conducted two experiments to investigate whether and how taste-congruent ambient colors in VR influence taste perception measured by four aspects: 1) taste ratings of a neutral drink; 2) taste association with virtual environments; 3) associated scenarios when immersed in these virtual environments; and 4) participants&#x2019; liking of these environments. In Experiment 1, participants adjusted the ambient light with different cross-modal-related colors in the immersive environments and reported their scaling of the Virtual Reality Sickness Questionnaire (VRSQ). Comfortable light intensity for each ambient color was obtained and color recognition problems were observed. In Experiment 2, participants tasted black tea (as the neutral drink), after being exposed to eight different virtual environments with different ambient colors. Results showed that the pink ambient color significantly increased the sweetness ratings. Differences in the color-taste association and environment liking were also observed in the ambient color conditions. Our results provide new insights into the cross-modal correspondence effect on ambient color and taste perception not found in prior work in VR scenarios.</p>
</abstract>
<kwd-group>
<kwd>virtual reality</kwd>
<kwd>human-food interaction</kwd>
<kwd>cross-modal correspondence</kwd>
<kwd>visual perception</kwd>
<kwd>user study</kwd>
<kwd>tasting</kwd>
</kwd-group>
</article-meta>
</front>
<body>
<sec id="s1">
<title>1 Introduction</title>
<p>Cross-modal correspondence (CMC) is the interaction among various senses, where people map sensations in one sensory modality onto other modalities in a consistent manner (<xref ref-type="bibr" rid="B51">Spence, 2011</xref>). Taste is one of the most common sensations that people experience daily. Basic taste perceptions include sweetness, sourness, bitterness, saltiness, and umami<xref ref-type="fn" rid="fn2">
<sup>1</sup>
</xref> (<xref ref-type="bibr" rid="B27">Keast and Breslin, 2003</xref>). Prior studies have explored how taste perception can be formed with other senses <italic>via</italic> CMC (<xref ref-type="bibr" rid="B30">Kn&#xf6;ferle and Spence, 2012</xref>; <xref ref-type="bibr" rid="B28">Kerruish, 2019</xref>; <xref ref-type="bibr" rid="B23">Halabi and Saleh, 2021</xref>). Existing research has provided evidence that color is an intrinsic and extrinsic visual stimulus influencing taste. Intrinsic colors are those features of the food itself, including the color of the food (<xref ref-type="bibr" rid="B37">Maga, 1974</xref>; <xref ref-type="bibr" rid="B26">Johnson and Clydesdale, 1982</xref>; <xref ref-type="bibr" rid="B8">Clydesdale et al., 1992</xref>; <xref ref-type="bibr" rid="B6">Charles et al., 2010</xref>; <xref ref-type="bibr" rid="B56">Spence and Piqueras-Fiszman, 2016</xref>). Extrinsic colors are those external to the food, including the influence of color itself (<xref ref-type="bibr" rid="B31">Koch and Koch, 2003</xref>; <xref ref-type="bibr" rid="B64">Wan et al., 2014</xref>), the color of the container (like the cup or the bowl) (<xref ref-type="bibr" rid="B24">Harrar et al., 2011</xref>; <xref ref-type="bibr" rid="B47">Piqueras-Fiszman and Spence, 2012</xref>; <xref ref-type="bibr" rid="B58">Spence and Velasco, 2018</xref>; <xref ref-type="bibr" rid="B61">Sugimori and Kawasaki, 2022</xref>), and the color of the environment (<xref ref-type="bibr" rid="B42">Oberfeld et al., 2009</xref>).</p>
<p>Recently, there has been an increasing trend of using relatively newer interactive technologies to enhance or modify taste perception (<xref ref-type="bibr" rid="B18">Gallace et al., 2012</xref>; <xref ref-type="bibr" rid="B39">Moser and Tscheligi, 2013</xref>; <xref ref-type="bibr" rid="B41">Nishizawa et al., 2016</xref>; <xref ref-type="bibr" rid="B1">Arnold, 2017</xref>; <xref ref-type="bibr" rid="B28">Kerruish, 2019</xref>; <xref ref-type="bibr" rid="B23">Halabi and Saleh, 2021</xref>). For example, <xref ref-type="bibr" rid="B40">Narumi et al. (2011)</xref> proposed MetaCookie&#x2b;, a pseudo-gustatory system that uses an augmented reality head-mounted display (AR HMD) to overlay both visual and olfactory information on real cookies to observe taste alterations. Another work by <xref ref-type="bibr" rid="B41">Nishizawa et al. (2016)</xref> introduced a projective-AR system to modify the flavor and taste of chips by changing their hue properties. Besides AR environments, researchers found immersive virtual reality (VR) environments can also influence people&#x2019;s food or beverage taste experience. VR is regarded as a new medium to provide visual stimulation and is gradually becoming a research tool for the discipline of perception (<xref ref-type="bibr" rid="B32">Kuliga et al., 2015</xref>; <xref ref-type="bibr" rid="B36">Lipson-Smith et al., 2020</xref>). Previous works show that this medium has several benefits. The users can have higher engagement and taste perception consistency in an immersive virtual environment compared to a physical environment (<xref ref-type="bibr" rid="B5">Carlson et al., 2011</xref>; <xref ref-type="bibr" rid="B34">Ledoux et al., 2013</xref>; <xref ref-type="bibr" rid="B2">Bangcuyo et al., 2015</xref>; <xref ref-type="bibr" rid="B60">Stelick et al., 2018</xref>). 
VR also presents a cost-effective way to provide various stimuli (<xref ref-type="bibr" rid="B36">Lipson-Smith et al., 2020</xref>) and thus can be used to conduct empirical studies on psychology or perception.</p>
<p>Researchers have utilized this medium to study the effects of color on taste perception, especially the beverage&#x2019;s (intrinsic) color on its tasting. For example, research from <xref ref-type="bibr" rid="B66">Wang et al. (2020)</xref> modified the color of the coffee in VR and <xref ref-type="bibr" rid="B25">Huang et al. (2019)</xref> studied the influence of tea color simulation in VR. In addition to changing the color of the food, <xref ref-type="bibr" rid="B7">Chen et al. (2020)</xref> studied the synthesis of multiple visual elements, with the environmental color as one of the variables. Although these works explored the feasibility of conducting taste perception studies related to color in virtual environments, they either focused on manipulating the intrinsic color of the foodstuff or dealt with complex VR scenes with various visual elements or items other than colors only. Compared with the modification of the intrinsic colors or textures, the focus on environmental color may be more suitable as VR environments emphasize the visual immersion of users. However, from <xref ref-type="bibr" rid="B7">Chen et al. (2020)</xref>&#x2019;s experiment, we cannot determine whether the change in taste perception was because of the color or other visual stimuli. A simple and specific type of extrinsic visual stimulus from the virtual environment is worthy of investigation because we can then isolate the effect of specific visual aspects of such environments and leverage the findings to develop VR environments tailored to more specific purposes.</p>
<p>In this work, we investigate the influence of ambient light color variations in virtual environments. This type of stimulus has been studied in <italic>physical settings</italic>, presenting a significant influence on taste perception (<xref ref-type="bibr" rid="B42">Oberfeld et al., 2009</xref>; <xref ref-type="bibr" rid="B57">Spence et al., 2014</xref>), but it has not been explored in <italic>VR environments</italic>. Ambient light color is ubiquitous in such environments and fits the characteristic of immersion as an extrinsic indirect visual cue. However, limited research has investigated how this parameter in immersive virtual environments influences taste perception. To fill this research gap in our understanding of how ambient light affects taste perceptions in virtual environments, we run two experiments that help answer two research questions (RQs).</p>
<p>RQ1: What will the suitable environment parameter be when we apply the cross-modal-related colors to the VR environments? is the main research question explored in Experiment 1. Previous work revealed two potential problems that color studies in VR environments may suffer: <bold>1)</bold> participants may experience visually induced motion sickness (VIMS) due to the unpleasant colors (<xref ref-type="bibr" rid="B3">Bonato et al., 2004</xref>; <xref ref-type="bibr" rid="B49">So and Yuen, 2007</xref>; <xref ref-type="bibr" rid="B21">Gusev et al., 2018</xref>; <xref ref-type="bibr" rid="B20">Grassini et al., 2021</xref>); and <bold>2)</bold> there may be a recognition deviation between the color of the ambient light presented through the VR HMD and the color perceived in the physical setting. Based on these, we conducted Experiment 1 to evaluate the impact of these issues and set up the ambient light conditions to be suitable for color-taste studies. We formulated the testing virtual environments with 11 ambient colors, including black, green, grey, orange, pink, purple, red, yellow, white, blue, and brown. Participants adjusted the light intensity of these environments to make them comfortable, then reported their perceived VIMS and recognized color when exposed to the environment. Our findings show that brown, white, and grey ambient lights were considered to be more comfortable. However, these three colors were not well recognized and thus were excluded in Experiment 2.</p>
<p>RQ2: How will taste perception be influenced by different ambient colors? is the research question for Experiment 2, where we further studied the influence of the ambient color setups <italic>via</italic> Experiment 1 on perceived taste in VR environments. For this research question, we investigated the following three aspects of taste perception: <bold>1)</bold> participants&#x2019; perceived taste of the tastant in terms of sweetness, sourness, bitterness, and saltiness; <bold>2)</bold> participants&#x2019; taste association under the stimuli together with the associated scenarios that participants imagined; and <bold>3)</bold> participants&#x2019; preference of the tested virtual environments where they tasted the tastant. According to <xref ref-type="bibr" rid="B64">Wan et al. (2014)</xref>, we excluded umami in our study because it did not show a strong association with any color, shape, or texture in real-life settings. Moreover, the term umami may be difficult to understand due to its inconsistent perceptions across different cultural backgrounds (<xref ref-type="bibr" rid="B19">Gotow et al., 2021</xref>). We selected black tea as our neutral tastant based on a pilot study. Our findings reveal that ambient color significantly influenced the sweet taste of the tastant. Pink and purple showed a significant association with the sweet taste rather than the bitter taste while black and green were associated with bitterness more frequently compared with sweetness. In terms of the associated scenarios coupled with the color-taste association, most of the participants associated the sweet taste with food-related items, but the bitter taste with specific scenes. Finally, orange ambient light was preferred over red.</p>
<p>Our work presents two main contributions:<list list-type="simple">
<list-item>
<p>&#x2022; An experiment protocol that uses ambient color for taste perception studies in virtual environments.</p>
</list-item>
<list-item>
<p>&#x2022; Evidence that ambient color is a significant factor for perceived sweetness and bitterness sensations in VR.</p>
</list-item>
</list>
</p>
</sec>
<sec id="s2">
<title>2 Related work</title>
<sec id="s2-1">
<title>2.1 Taste perception in virtual environments</title>
<p>Taste experience can be introduced to different types of virtual environments, ranging from curved computer-based screens to AR/VR HMDs (<xref ref-type="bibr" rid="B65">Wang et al., 2021</xref>). There is an increasing trend of using virtual environments in human-food interaction research because they allow researchers to easily add environmental features or properties and minimize certain negative effects of real settings (<xref ref-type="bibr" rid="B2">Bangcuyo et al., 2015</xref>; <xref ref-type="bibr" rid="B13">Delarue et al., 2019</xref>). For instance, compared to traditional booths, the hedonic data collected in a virtual coffee house can be more identifiable and reliable as a predictor for future coffee evaluation (<xref ref-type="bibr" rid="B2">Bangcuyo et al., 2015</xref>). <xref ref-type="bibr" rid="B13">Delarue et al. (2019)</xref> have shown that a multi-sensory immersive room could minimize the differences in product testing results produced at different periods in a lab setting. Moreover, introducing the taste experience to VR also benefits the immersive experience. Eating and drinking unify both virtual and physical sensory cues (<xref ref-type="bibr" rid="B55">Spence, 2016</xref>). Thus, they can increase users&#x2019; presence in the virtual environments, i.e., their sense of being there (<xref ref-type="bibr" rid="B48">Slater and Wilbur, 1997</xref>). <xref ref-type="bibr" rid="B65">Wang et al. (2021)</xref> also suggested that introducing physical interaction mechanisms that the users are used to in the physical world into the immersive virtual environments can enhance the psychological feeling of presence. Furthermore, <xref ref-type="bibr" rid="B18">Gallace et al. (2012)</xref> stated that increasing the number of senses stimulated in VR can increase users&#x2019; enjoyment, memorability, and presence. Since taste is one of the major sensory modalities familiar to people in real-life settings, it is beneficial to have it in VR. 
Eating and food-oriented activities may also become integral to the virtual experience in social contexts as VR becomes more available and allows people to meet on platforms like VRChat<xref ref-type="fn" rid="fn3">
<sup>2</sup>
</xref> for socialization and Horizon Workrooms<xref ref-type="fn" rid="fn4">
<sup>3</sup>
</xref> for business meetings.</p>
<p>On the other hand, taste-related results in virtual environments may be different from those obtained in physical environments. For example, the virtual environment may significantly influence users&#x2019; hedonic responses to foodstuffs. Beef can be rated higher in terms of liking when consumed in a VR restaurant compared to a traditional sensory laboratory condition (<xref ref-type="bibr" rid="B12">Crofton et al., 2021</xref>). <xref ref-type="bibr" rid="B60">Stelick et al. (2018)</xref> also found that VR had a significant influence on the taste perception of blue cheese, especially the level of pungency. <xref ref-type="bibr" rid="B34">Ledoux et al. (2013)</xref> showed that food craving produced by VR was marginally greater than a neutral cue in a physical setting.</p>
</sec>
<sec id="s2-2">
<title>2.2 Cross-modal effect between color and taste</title>
<p>Cross-modal correspondence (CMC) is about the interactions among different senses. People combine information captured from various senses to form a more comprehensive view of the external world or activity they are doing (e.g., when people see and smell food in front of them). Interaction between these systems is an essential part of this process (<xref ref-type="bibr" rid="B59">Stein and Meredith, 1993</xref>). In particular, based on CMC, people can map features or sensations in one sensory modality onto features of other modalities in a consistent manner (<xref ref-type="bibr" rid="B51">Spence, 2011</xref>). For example, the taste of sweetness can correspond to a round shape (visual) or high pitch (audio). Surprisingly, CMC is not confined to one group (cultural or otherwise) but is often universally shared (<xref ref-type="bibr" rid="B30">Kn&#xf6;ferle and Spence, 2012</xref>). Taste is one of the most common sensations that people experience daily. Basic taste perceptions include sweetness, sourness, bitterness, saltiness, and savor (umami) (<xref ref-type="bibr" rid="B27">Keast and Breslin, 2003</xref>), which can often be formed with other senses, such as smell or sound (<xref ref-type="bibr" rid="B30">Kn&#xf6;ferle and Spence, 2012</xref>; <xref ref-type="bibr" rid="B28">Kerruish, 2019</xref>; <xref ref-type="bibr" rid="B23">Halabi and Saleh, 2021</xref>).</p>
<p>Color is an essential aspect of CMC between vision and taste. Color-taste correspondence is of great interest to the food industry, especially in helping design food product packaging because of its direct influence on consumers&#x2019; expectations (<xref ref-type="bibr" rid="B4">Cardello, 1994</xref>; <xref ref-type="bibr" rid="B31">Koch and Koch, 2003</xref>). To a large extent, it serves as a sensory property of the product before its consumption (<xref ref-type="bibr" rid="B58">Spence and Velasco, 2018</xref>). Color is a focused factor that significantly affects taste perception (<xref ref-type="bibr" rid="B46">Piqueras-Fiszman and Spence, 2014</xref>). For example, from the studies of color itself, <xref ref-type="bibr" rid="B31">Koch and Koch (2003)</xref> have stated that red is connected to sweetness while yellow is connected to sour, citrus, and fruity tastes. In addition, <xref ref-type="bibr" rid="B64">Wan et al. (2014)</xref> have shown that some strong CMCs are similar in different cultures, such as mapping black with bitterness and pink with sweetness.</p>
<p>Factors influencing multi-sensory flavor perception can be divided into either <italic>intrinsic</italic> to the food (e.g., texture, aroma) or <italic>extrinsic</italic> to it (e.g., related to the packaging, receptacle, or external environment) (<xref ref-type="bibr" rid="B67">Wang et al., 2019</xref>), both of which have been studied by previous research on color-taste CMC. When color represents the original feature of the foodstuff, it is considered an intrinsic visual stimulus feature (<xref ref-type="bibr" rid="B54">Spence, 2019b</xref>). For example, <xref ref-type="bibr" rid="B26">Johnson and Clydesdale (1982)</xref> found that applying red color to water could increase the sweet taste. Similarly, <xref ref-type="bibr" rid="B37">Maga (1974)</xref> showed that green statistically increased sweet taste threshold sensitivity while yellow decreased taste sensitivity. Different from intrinsic features, extrinsic factors are outside of the food itself. For example, <xref ref-type="bibr" rid="B47">Piqueras-Fiszman and Spence (2012)</xref> have shown that orange and dark-cream cups could enhance the chocolate flavor of a drink. <xref ref-type="bibr" rid="B24">Harrar et al. (2011)</xref> investigated popcorn in four different colored bowls and found that the sweet popcorn, in addition to being sweet, was perceived as saltier when consumed in a colored bowl than in a white bowl.</p>
<p>Specifically, we noticed that the color of ambient light is a type of extrinsic factor that can influence taste. <xref ref-type="bibr" rid="B50">Spence and Carvalho (2020)</xref> stressed the importance of the environment where the beverages are consumed. The ambient color of an environment could significantly affect the beverage&#x2019;s flavor and taste (<xref ref-type="bibr" rid="B42">Oberfeld et al., 2009</xref>). <xref ref-type="bibr" rid="B42">Oberfeld et al. (2009)</xref> showed that wine was perceived to be spicier in blue or green ambient lighting than in red or white. <xref ref-type="bibr" rid="B57">Spence et al. (2014)</xref> explored people&#x2019;s taste perception of red wine in a room that changes ambient color and music and found a significant difference in taste and beverage liking. Their results provide empirical support for the claim about the influence of ambient color on flavor from <xref ref-type="bibr" rid="B42">Oberfeld et al. (2009)</xref>.</p>
<p>In general, there are three possible mechanisms underpinning the CMCs (<xref ref-type="bibr" rid="B51">Spence, 2011</xref>; <xref ref-type="bibr" rid="B45">Parise and Spence, 2013</xref>): 1) <italic>structural correspondence</italic>, described from the neural science side, is the byproduct of the innate cognitive system; 2) <italic>statistical correspondence</italic>, describes a learned process associating the established experience and the environment; and 3) <italic>semantic correspondence</italic>, stresses that the same terms can be used to describe different stimuli linguistically. For color-taste CMC, there are mainly two types of specific explanations for their correspondence. The first type is the association with the source object(s), meaning that the color-taste pairs result from associating specific colors with certain gustatory cues (<xref ref-type="bibr" rid="B66">Wang et al., 2020</xref>). Moreover, <xref ref-type="bibr" rid="B52">Spence and Levitan (2021)</xref> found that some color-taste associations are not limited to a particular source object. The second explanation is users&#x2019; emotional mediation. For example, the pink-sweet association arises because both stimuli are linked with happiness independently and are connected by people (<xref ref-type="bibr" rid="B52">Spence and Levitan, 2021</xref>). It is important to note that some colors with positive emotional valence in abstract scenarios may lead to negative emotions when matched with food. For instance, blue is a favorite color to many people, but blue-colored food can trigger negative feelings (<xref ref-type="bibr" rid="B53">Spence, 2019a</xref>).</p>
</sec>
<sec id="s2-3">
<title>2.3 Color-taste study in VR</title>
<p>The color-related research in the physical world has inspired some researchers to apply them in VR. <xref ref-type="bibr" rid="B66">Wang et al. (2020)</xref> designed an experiment manipulating the color of coffee in VR so that their participants saw either a dark brown or light brown liquid as they consumed the real coffee. Similarly, <xref ref-type="bibr" rid="B25">Huang et al. (2019)</xref> designed three experiments to study the influence of actual tea color simulation in VR on users&#x2019; taste ratings of Chinese red and green tea. Moreover, <xref ref-type="bibr" rid="B7">Chen et al. (2020)</xref>&#x2019;s study went beyond colors. They created a sweet, bitter, and neutral scene integrating shape, color, and visual textures, and assessed their influence on perceived sweetness and environment liking. Results showed that the sweet-congruent environment significantly increased the perceived sweetness of the beverage they tested. However, these studies either focus on manipulating the intrinsic color of the foodstuff in VR or dealing with complex VR scenes.</p>
<p>As discussed in the previous section, the color of the ambient light is an effective stimulus studied in the physical environment. This inspired us to consider if similar effects can be reproduced and leveraged in VR environments. The virtual environments may cost-effectively control or simulate the ambient color rooms while providing fully colored immersive environments. Firstly, the color of the ambient light is a simple parameter that can be controlled in VR and be compared with previous works done in the physical setting. Although not related to taste, researchers have already suggested using VR for ambient color studies. For example, <xref ref-type="bibr" rid="B43">Oberfeld and Hecht (2011)</xref> studied the influence of environmental color on the perceived height and width of interior rooms virtually. <xref ref-type="bibr" rid="B63">von Castell et al. (2018)</xref> also studied the effects of the hue, saturation, and luminance of color on the perceived height of interior spaces <italic>via</italic> a VR HMD. <xref ref-type="bibr" rid="B36">Lipson-Smith et al. (2020)</xref> did a study focusing on the mood and preference towards the virtual environments varying in colors. In the meantime, applying ambient light as an environmental parameter to VR leverages the immersion provided by this medium well. In VR development tools, ambient light hue is an important parameter in creating the visual appearance of the scene and its objects. Changing ambient light hue can effectively alter the color scheme of all the 3D models (objects) in the VR scene (<xref ref-type="bibr" rid="B62">Unity, 2019</xref>). Based on these, we chose ambient light varying in colors as our stimuli to study its influence on taste perception in VR. While ambient light is essential to and a foundation building block to all VR environments, its effect on taste perception has not been explored in detail. Our research aims to fill this important but underexplored area.</p>
</sec>
<sec id="s2-4">
<title>2.4 Potential issues for VR color-taste studies</title>
<p>In a VR environment, color may induce Visually Induced Motion Sickness (VIMS) and make the application unusable (<xref ref-type="bibr" rid="B22">Gusev et al., 2016</xref>). <xref ref-type="bibr" rid="B3">Bonato et al. (2004)</xref> simulated the optokinetic drum (spinning sphere) with specific color patterns in a virtual environment. They suggested that the chromaticity might increase the inconsistency between visual and vestibular inputs. However, <xref ref-type="bibr" rid="B49">So and Yuen (2007)</xref> found the change in the colors would not influence VIMS significantly in VR. Besides, colored environments may lead to a higher VIMS compared to monochrome environments (<xref ref-type="bibr" rid="B21">Gusev et al., 2018</xref>). Based on the literature, it is not clear whether the ambient color in VR would cause VIMS and, if so, to what extent the VIMS would be. To avoid discomfort and bias in our color-taste experiment, we conducted the first experiment to adjust the ambient colors in the virtual environments to ensure they were not harmful to the experience. Color recognition can be another potential issue for VR color-taste studies. It is important to consider whether people could depict the ambient color hue in a VR environment as they perceive it in the real world (<xref ref-type="bibr" rid="B36">Lipson-Smith et al., 2020</xref>). We also want to filter out this issue before running the color-taste experiment.</p>
<p>Selecting a suitable tastant is also important for a color-taste study. Some of the existing studies in CMC between taste and other stimuli used word-based descriptions or images of the food. <xref ref-type="bibr" rid="B11">Crisinel and Spence (2009)</xref> used an Implicit Association Test and applied food images as visual stimuli to investigate the association between basic taste and pitch (auditory parameter). Similarly, <xref ref-type="bibr" rid="B64">Wan et al. (2014)</xref> used graphics with words referring to taste as the visual stimuli in their online study. The participants dragged the image to the word they thought it was associated with. On the contrary, <xref ref-type="bibr" rid="B10">Crisinel and Spence (2010)</xref> did a repeated experiment using real tastants. However, they got similar taste perception results as those evoked by the food names. As reported by <xref ref-type="bibr" rid="B11">Crisinel and Spence (2009)</xref> in their work, although it was convenient to use the names of food or drinks in the study, it entailed several disadvantages. First, between-participant differences might exist; for example, the same word &#x201c;beer&#x201d; may be associated with various distinct tastes. Second, excluding real tastants prevented them from studying the direct interaction between the sensory modalities. As such, in our study, we did not use words and instead used a real tastant because this is more aligned with the use of VR, as a platform that brings users to experience virtual environments but allows them to still be closely associated with the physical world.</p>
<p>Previous studies in taste and virtual environments have utilized a variety of tastant choices. We collected some representative ones and listed them in <xref ref-type="table" rid="T1">Table 1</xref>. Among these, we found that a beverage is the most frequently used tastant. In addition, drinking is relatively more convenient and safer than eating. Thus, we used beverages as our taste stimuli. We anticipated that a beverage with a very strong taste would lead towards one specific taste and bias the results. Due to this, a neutral tastant that would not affect participants&#x2019; taste perception is required. However, limited research has discussed the rationale behind choosing the tastants and whether their tastes are neutral. We decided to run a pilot study to choose a neutral tastant before our formal color-taste experiment.</p>
<table-wrap id="T1" position="float">
<label>TABLE 1</label>
<caption>
<p>A summary of related work about taste and the immersive environments with the used stimuli. Tastants are highlighted in bold and tested stimuli are marked in italic.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="left">References</th>
<th align="left">Description</th>
<th align="left">Main difference between this work and our research</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td colspan="3" align="left">Taste perception in immersive virtual scenes (see also <xref ref-type="sec" rid="s2-1">Section 2.1</xref>)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B12">Crofton et al. (2021)</xref>
</td>
<td align="left">Studying the impact of different virtual environments on people&#x2019;s hedonic ratings of beef and chocolate. Scenarios were <italic>traditional sensory booths, a VR restaurant, a VR Irish countryside, and a VR busy city</italic>
</td>
<td align="left">(1) Their focus was specific scene simulation instead of CMC-related variables. (2) Their emphasis was on users&#x2019; hedonic experiences instead of taste</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B60">Stelick et al. (2018)</xref>
</td>
<td align="left">Adapting VR to sensory evaluation <italic>via</italic> a pilot test to collect participants&#x2019; feelings on blue cheese in virtual contexts using <italic>Custom-recorded 360-degree videos of a sensory booth, a park bench, and a cow barn in a VR HMD</italic>.</td>
<td align="left">Same as above</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B2">Bangcuyo et al. (2015)</xref>
</td>
<td align="left">Studying the use of immersive techniques in consumers&#x2019; hedonic taste of (whole bean) coffee in <italic>virtual coffeehouses with or without contextual information</italic>
</td>
<td align="left">Same as above</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B13">Delarue et al. (2019)</xref>
</td>
<td align="left">Studying the experience of alcohol-free beer in <italic>multi-sensory immersive rooms simulating a nightclub and a beach</italic>
</td>
<td align="left">Same as above</td>
</tr>
<tr>
<td colspan="3" align="left">Visual-taste cross-modal correspondence in the physical environment (see also <xref ref-type="sec" rid="s2-2">Section 2.2</xref>)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B31">Koch and Koch. (2003)</xref>
</td>
<td align="left">Examining users&#x2019; preconception of taste, using <italic>10 colors and 8 tastes words in the form of scale-based questions on paper</italic>
</td>
<td align="left">(1) We included a real tastant. (2) We did the color experiment in virtual environments rather than through a computer screen or paper-based questionnaires</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B64">Wan et al. (2014)</xref>
</td>
<td align="left">Investigating the crossmodal-correspondences between visual features (including colors, shapes, textures) and basic taste words, using <italic>Text descriptors and images of the visual stimuli, presented against a gray background</italic>
</td>
<td align="left">Same as above</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B61">Sugimori and Kawasaki. (2022)</xref>
</td>
<td align="left">Investigating the effect of background color on taste using chocolate and green tea with varying bitterness levels. Tested stimuli were <italic>black or pink wrapping paper</italic> for chocolate and <italic>clear blue cup or clear cup</italic> for green tea</td>
<td align="left">We did the experiment in a virtual environment seeing through a VR HMD and focused on ambient light</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B24">Harrar et al. (2011)</xref>
</td>
<td align="left">Investigating whether the color of the bowl would affect the taste of the popcorn it contains. Comparing <italic>Four different colored bowls: white, green, red, and blue</italic>
</td>
<td align="left">Same as above</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B42">Oberfeld et al. (2009)</xref>
</td>
<td align="left">Investigating the effect of the ambient room color on the flavor of wine, while maintaining the color of the beverage. Comparing four types of <italic>ambient lighting varying in colors</italic>: blue, green, red, white</td>
<td align="left">(1) Although we both focused on ambient colors, we introduced VR and took into account aspects not explored before. (2) The tested parameters of this work were focused on wine (e.g., fruitiness, spiciness). Our work is more general, exploring basic tastes</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B57">Spence et al. (2014)</xref>
</td>
<td align="left">Investigating the combination of ambient light and music on the wine drinking experience. Tested stimuli were: <italic>(1) white, red, green lighting with sour music, and red lighting with sweet music. (2) white, green, red lighting with sweet music, and green lighting with sour music</italic>
</td>
<td align="left">Same as above and we focused on ambient light only and not specific to wine</td>
</tr>
<tr>
<td colspan="3" align="left">Visual-taste cross-modal effect in immersive virtual environments (see also <xref ref-type="sec" rid="s2-3">Section 2.3</xref>)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B40">Narumi et al. (2011)</xref>
</td>
<td align="left">Investigating the effectiveness of augmented reality flavor gustatory display based on edible cues and cross-modal interaction (using cookies as the tastant) by <italic>overlaying visual and olfactory information</italic> onto a real cookie with special AR marker patterns</td>
<td align="left">We modified the extrinsic color (an environmental factor) as opposed to the intrinsic color of the foodstuff</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B25">Huang et al. (2019)</xref>
</td>
<td align="left">Assessing the influence of <italic>virtual color of Chinese green and red tea in a VR HMD</italic> on the taste of Chinese tea</td>
<td align="left">Same as above</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B7">Chen et al. (2020)</xref>
</td>
<td align="left">Investigating the impact of visual-taste congruency on perceived sweetness and product liking in immersive VR (using syrup diluted with water as the tastant). Stimuli were <italic>rooms with sweet-congruent, bitter-congruent, and neutral visual cues in a VR HMD</italic>.</td>
<td align="left">Same as above</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B66">Wang et al. (2020)</xref>
</td>
<td align="left">Exploring whether making coffee look milkier in a VR environment can alter its perceived flavor and liking. Stimuli were <italic>dark brown or light brown coffee in a VR HMD</italic>.</td>
<td align="left">Our choice of stimuli is more generic and focuses on environmental colors instead of including other visual elements together</td>
</tr>
<tr>
<td align="left">Our experiment</td>
<td align="left">Examining cross-modal correspondence between <italic>ambient colors</italic> and taste perception in virtual reality, using black tea as the tastant, considered a neutral tastant by participants and including 8 ambient light colors in a VR environment</td>
<td align="left"/>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s2-5">
<title>2.5 Summary</title>
<p>
<xref ref-type="table" rid="T1">Table 1</xref> summarizes the related works mentioned above. This study has three main differences compared to prior studies:<list list-type="simple">
<list-item>
<p>1) Unlike prior work about taste perception in immersive virtual scenes (<xref ref-type="sec" rid="s2-1">Section 2.1</xref>), our focus was CMC-related stimuli instead of realistic scene simulation.</p>
</list-item>
<list-item>
<p>2) In terms of research about visual-taste CMC in the physical environment (<xref ref-type="sec" rid="s2-2">Section 2.2</xref>), our work is different from the experiments from <xref ref-type="bibr" rid="B31">Koch and Koch (2003)</xref> and <xref ref-type="bibr" rid="B64">Wan et al. (2014)</xref> because our work included a real tastant. In addition, our work differs from studies dealing with ambient light (e.g. <xref ref-type="bibr" rid="B42">Oberfeld et al., 2009</xref>; <xref ref-type="bibr" rid="B57">Spence et al., 2014</xref>). We introduce VR as a platform to determine its level of visual-taste CMC. To do this, several changes and considerations are required but not found in prior research. The use of VR as a medium may bring interdisciplinary insights to applications in VR, which has grown rapidly in the last few years and is positioned to shift significantly how we interact with others, whether for social occasions (e.g., in platforms such as VRChat) or for production purposes (e.g., within Horizon Workrooms) that aim to emulate many aspects of the physical world.</p>
</list-item>
<list-item>
<p>3) Unlike previous work on taste-related CMC in VR (<xref ref-type="sec" rid="s2-3">Section 2.3</xref>), our selection of ambient colors as the visual stimuli is novel and has not been explored systematically before. Ambient colors are foundational to VR applications due to their emphasis on visual immersion and focus on extrinsic modification. Our understanding of how ambient colors affect people&#x2019;s perception of taste will enable us to develop VR applications that can be more tailored to specific situations.</p>
</list-item>
</list>
</p>
</sec>
</sec>
<sec id="s3">
<title>3 Experiment 1: Adjustment of the ambient color in the virtual environments (RQ1)</title>
<p>In this experiment, we aim to explore and determine the suitable settings of different ambient color environments, which is the premise for their usage in the follow-up taste perception study. As mentioned in <xref ref-type="sec" rid="s2-4">Section 2.4</xref>, color in VR environments may cause visually induced motion sickness (VIMS) (<xref ref-type="bibr" rid="B3">Bonato et al., 2004</xref>; <xref ref-type="bibr" rid="B49">So and Yuen, 2007</xref>; <xref ref-type="bibr" rid="B21">Gusev et al., 2018</xref>; <xref ref-type="bibr" rid="B20">Grassini et al., 2021</xref>). Directly adapting the colors from former color-taste studies in physical settings to the ambient color in virtual environments with the default light setting may cause discomfort to users. Adjustments to the related lighting parameters for these colors with high visual saturation are necessary. In addition to the hue (i.e., RGB values), there are several parameters influencing the visual presentation and rendering of the environment: ambient mode, compression, environment reflections, light intensity, and bounces in Unity engine (<xref ref-type="bibr" rid="B62">Unity, 2019</xref>). We set our main adjustment parameter as <bold>light intensity</bold> since it directly influences the brightness of the ambient light in the VR scene. Also, as mentioned in <xref ref-type="sec" rid="s2-4">Section 2.4</xref>, it is uncertain whether color recognition in VR deviates from the physical world, which might lead to ambiguous or biased results in color-taste studies. Thus, we also tested the recognition accuracy of the ambient color in VR in Experiment 1.</p>
<sec id="s3-1">
<title>3.1 Stimuli</title>
<p>We utilized the colors described in <xref ref-type="bibr" rid="B64">Wan et al. (2014)</xref>&#x2019;s taste-color synesthesia, including black, green, grey, orange, pink, purple, red, yellow, white, blue, and brown. These colors were used as the hue of the ambient light in the VR environments. The RGB values of these colors are also listed in <xref ref-type="fig" rid="F1">Figure 1</xref>. Specifically, we created 11 hexagonal virtual rooms to demonstrate these ambient colors (see <xref ref-type="fig" rid="F1">Figure 1</xref>). In each room, we placed a spotlight at the top middle of the room to make sure it illuminated the room (i.e., the ambient color covered the entire interior of the room), and assigned the RGB value of the color. The spot angle of the light was 129.8&#xb0;. We set the range for controlling how far the light is emitted from the center of the object from 0 to 20. The default light intensity for each room is 10. We left the rooms empty to prevent any confounding factors.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption>
<p>Ambient color stimuli that were utilized in this study, with their RGB values, ambient light effects in VR, and the suitable light intensities obtained from Experiment 1. The first row with an arrow represents the colors tested in Experiment 1. The second row indicates the colors in Experiment 2, with white as a baseline color.</p>
</caption>
<graphic xlink:href="frvir-03-1056782-g001.tif"/>
</fig>
</sec>
<sec id="s3-2">
<title>3.2 Participants and apparatus</title>
<p>Twelve healthy participants (6 females, 6 males; mean age &#x3d; 21.2 &#xb1; 1.25&#xa0;years) from a Sino-Chinese university located in a mid-size city in China were recruited for the first experiment. The participants were not paid and joined the experiment voluntarily. They did not have color blindness and did not report any issues regarding color recognition. Seven participants had prior experiences with VR HMD.</p>
<p>We used an Oculus Rift S VR HMD to display the virtual environments. The HMD has a resolution of 1,280 &#xd7; 1,440 px per eye, a refreshing rate of 80&#xa0;Hz, and a 115&#xb0; field of view. Participants could move their heads in 6DOF. It was connected to an Intel Core i7-6700HQ processor laptop with an NVIDIA GeForce GTX 1070 GPU card. Participants could interact with the virtual environment <italic>via</italic> an Oculus hand-held controller. The virtual environments were created using the Unity3D platform (version 2019.1.14f1) and the related scripts were programmed in C&#x23;. The experiment was conducted in an empty and quiet room where the participants were equipped with the VR HMD and controllers. <xref ref-type="fig" rid="F2">Figure 2</xref> shows the setup.</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption>
<p>Experiment 1 setup. A participant is adjusting the light intensity of pink ambient color in the virtual room displayed <italic>via</italic> a VR HMD, which is connected to a laptop.</p>
</caption>
<graphic xlink:href="frvir-03-1056782-g002.tif"/>
</fig>
</sec>
<sec id="s3-3">
<title>3.3 Experimental design and task</title>
<p>A within-subjects design was used in this experiment. Participants would enter all the virtual rooms to complete the given tasks. When first entering a room, the initial intensity of the tested ambient light was set at 10. Participants were required to adjust the light intensity up and down starting from this value until they found the lighting condition that was the most comfortable. For experimental purposes, we set the adjustment range of the light intensity from 0 (dimmest light) to 20 (brightest light for our 3D assets). Participants could press Button A or B to make small adjustments to the light intensity (increased or decreased by 0.5 per press) and push the thumbstick to make big adjustments (see <xref ref-type="fig" rid="F2">Figure 2</xref>). Once they settled down the light intensity for the current room, they stopped interacting with the controller for about 10&#xa0;s. The finalized light intensity was then recorded by the program. After that, they needed to answer the question &#x201c;what ambient color do you think is the current environment?&#x201d; and complete a Virtual Reality Sickness Questionnaire (VRSQ) (<xref ref-type="bibr" rid="B29">Kim et al., 2018</xref>) in oral form. The experimenter asked these questions from VRSQ one by one and manually recorded the answers. Then the participants moved to the next room. The order of the room for each participant was counterbalanced using a Latin-Square design. Note that in our experimental design, participants&#x2019; answers toward color recognition might not have a relationship with their adjustment of light intensity. Although each participant had encountered several levels of intensity, they were exposed to the same intensity condition at first and were informed that they would make the adjustment for one specific color each time.</p>
</sec>
<sec id="s3-4">
<title>3.4 Procedure</title>
<p>The whole experiment lasted approximately 30&#xa0;min for each participant. Before the formal experiment began, participants were asked to fill in a questionnaire for demographic background. We asked about their prior VR experience and if they had difficulty recognizing colors. Then participants were introduced to the research aims, tasks, controls of the experiment, and the questions that would be asked. Then they wore the VR HMD and started the experiment. After they adjusted the light intensity for all the ambient colors, they would take off the HMD and have a break. After the break, we asked participants to adjust the light intensity for the ambient colors in reverse order. This was intended to eliminate the residue effects from the previous colored room.</p>
</sec>
<sec id="s3-5">
<title>3.5 Results and discussion</title>
<p>The average light intensities for each ambient color are 14.63 for black, 5.72 for green, 11.97 for grey, 6.82 for orange, 7.98 for pink, 6.63 for purple, 3.68 for red, 4.34 for yellow, 4.69 for white, 4.09 for blue, and 12.75 for brown. As mentioned before, the values are between 0 (the dimmest condition) to 20 (the brightest condition).</p>
<p>In total, we collected 24 color recognition results (12 participants &#xd7; 2 rounds). <xref ref-type="fig" rid="F3">Figure 3</xref> summarizes the recognition accuracy for each ambient color. The recognition accuracy for the ambient color of green, blue, red, purple, pink, and orange was high. However, that for the ambient colors of brown and grey was low (29% and 41.5%, respectively). Though not low in recognition accuracy, white was frequently mixed with light pink (4 times). For grey and brown, there was a grey-green, grey-blue confusion, and a brown-pink confusion. <xref ref-type="fig" rid="F4">Figure 4</xref> is the summary of the average VRSQ score for each ambient color following the calculating method provided by <xref ref-type="bibr" rid="B29">Kim et al. (2018)</xref>. A higher score indicates that the simulator sickness caused by the ambient color environment is more severe. Participants regarded brown, grey, and white as more comfortable than other colors in the VR environment after self-adjusting the light intensity. However, the difference was not significant.</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption>
<p>Recognition accuracy (in percentage) for each ambient color.</p>
</caption>
<graphic xlink:href="frvir-03-1056782-g003.tif"/>
</fig>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption>
<p>Average VRSQ scores for each ambient color, the error bars represent the standard deviations.</p>
</caption>
<graphic xlink:href="frvir-03-1056782-g004.tif"/>
</fig>
<p>Colors with low recognition accuracy were excluded from the follow-up experiment. We excluded white as it was mixed with pink while keeping pink since pink could be recognized successfully in the virtual environment. Grey and brown were excluded for the same reason. Although the color recognition accuracy of black was not high, it is an important color showing a high correlation with the bitter taste in the physical environment. So, it was useful to keep it and see its effectiveness in the virtual environment.</p>
<p>The results of recognition accuracy show that gaps may exist between the ambient colors that a developer wants the audience to perceive and the actual perceived color by this audience, especially when creating virtual environments. Developers are suggested to pay attention to color recognition accuracy, especially when color is an important factor in the virtual environment. Another contributing result was the comfortable intensity for the 11 ambient colors in the virtual environment with their VRSQ value, working as a guideline for developers who want to utilize these colors in their designs.</p>
</sec>
</sec>
<sec id="s4">
<title>4 Experiment 2: CMC between ambient color and taste in virtual environments (RQ2)</title>
<p>In this experiment, we investigated how ambient colors would influence users&#x2019; perception of taste in immersive virtual environments, including sweet, sour, bitter, and salty tastes, corresponding to <bold>RQ2</bold>. As mentioned in Section 2.4, we needed a real tastant for the color-taste experiment rather than using words describing different tastes. In addition, the tastant should be as neutral as possible in its perceived taste so that it would not affect participants&#x2019; taste perception. Thus, we ran a pilot study to find a neutral tastant before the formal experiment.</p>
<sec id="s4-1">
<title>4.1 Pilot study</title>
<p>We invited the same 12 participants in Experiment 1 to complete this pilot study. The participants filled in a demographic questionnaire before the formal experiment. In the questionnaire, we inquired about their attention to taste in the form of scaling questions from 0-low to 6-high, &#x201c;to how much extent do you pay attention to taste during your daily dieting?&#x201d;. Their average ratings on taste attention were: 5.08 for sweetness, 4.69 for sourness, 3.38 for bitterness, and 5.06 for saltiness. We prepared four common beverages, including apple juice (brand: Huiyuan Juice), black tea (brand: Nongfu spring), coconut water (brand: Goodfarmer), and yogurt soda (brand: Energetic Forest), all of which could be bought at a local grocery shop. Our main consideration was to provide several possible choices of drinks that do not have a strong taste and are also familiar to users. After some trials before the pilot study with a number of possibilities, the four chosen ones represent a wide range. The pilot study then helped us narrow down to one.</p>
<p>We opted to use mini paper cups (100&#xa0;ml) to ensure that it was convenient to drink the beverage while wearing an HMD in the main experiment later. The cups were covered with lids to prevent participants from recognizing the beverage by seeing the content or by smelling the odor (see <xref ref-type="fig" rid="F5">Figure 5</xref>). Participants were asked to drink each of the four beverages, holding it in their mouth for at least 3&#xa0;s before swallowing it. Then they needed to rate the taste of the beverage in terms of sweetness, sourness, bitterness, saltiness, and neutrality using a 7-scale questionnaire. Water was given to participants between two beverages to remove any residual, left-over taste. The sequence of the delivered beverages was counterbalanced. Note that, water was not used as the tastant because it was used as the &#x201c;palate cleanser&#x201d; to clear participants&#x2019; taste from the previous drink, and as such using it as the tastant is not appropriate (<xref ref-type="bibr" rid="B25">Huang et al., 2019</xref>; <xref ref-type="bibr" rid="B7">Chen et al., 2020</xref>; <xref ref-type="bibr" rid="B66">Wang et al., 2020</xref>).</p>
<fig id="F5" position="float">
<label>FIGURE 5</label>
<caption>
<p>Tastant choices and the container of the tastant.</p>
</caption>
<graphic xlink:href="frvir-03-1056782-g005.tif"/>
</fig>
<p>Black tea was rated the most neutral among the four beverages. The remaining tastants have strong sweet tastes based on the ratings. Therefore, we used black tea as the taste stimulus in our experiment.</p>
</sec>
<sec id="s4-2">
<title>4.2 Stimuli</title>
<p>We removed the colors showing low recognition accuracy from our Experiment 1 (as described in <xref ref-type="sec" rid="s3-5">Section 3.5</xref>). Thus, the color stimuli in the main study included black, green, orange, pink, purple, red, yellow, and blue. Although white was excluded, we set it as a starting, ending, and transition color for the experiment to avoid the residue effects from the previous colored room. A similar approach can be found in <xref ref-type="bibr" rid="B57">Spence et al. (2014)</xref>. <xref ref-type="fig" rid="F1">Figure 1</xref> shows a preview of each virtual environment with the RGB parameter and intensity level of each ambient color. Notice that we did not include the 3D model of the beverage in the virtual environment because we wanted to focus on the influence of the immersive environment only. We used black tea as the tastant based on the results of the pilot study. Like the pilot study, the tastant was served in 100&#xa0;ml paper cups covered with lids.</p>
</sec>
<sec id="s4-3">
<title>4.3 Participants and apparatus</title>
<p>We recruited another 16 participants (7 females, 9 males; 22.2 &#xb1; 1.66 years, ranging from 21 to 27&#xa0;years). Thirteen participants had used a VR HMD before the experiment and all of them had normal or corrected-to-normal vision and had no history of color blindness. Regarding their drinking behaviors, thirteen participants reported drinking beverages several times a week and twelve participants paid much attention to the taste of the beverage during their daily drinking activities. We used the same VR apparatus as in Experiment 1.</p>
</sec>
<sec id="s4-4">
<title>4.4 Design and task</title>
<p>
<xref ref-type="fig" rid="F6">Figure 6</xref> provides an overview of Experiment 2. We applied a within-subjects design in this experiment with ambient color as the independent variable. As mentioned, we used a white room as a starting, ending, and transition room between the two rooms. The order of ambient color conditions was counterbalanced with a Latin-Square design. In each condition, the participants would be exposed to the virtual room for 30&#xa0;s, starting from when they entered the scene and ending when they heard a notification to drink the tastant. They were asked to have a sip from the 100&#xa0;ml tastant in their mouths for 3&#xa0;s before swallowing it. After they drank the tastant, they were then asked to report their feelings according to the following questions:<list list-type="simple">
<list-item>
<p>&#x2022; <bold>Q2-1.</bold> Can you rate the taste of the drink in the just-finished scene in terms of sweet, sour, bitter, and salty? Ratings are from 1 (very weak) to 7 (very strong).</p>
</list-item>
<list-item>
<p>&#x2022; <bold>Q2-2.</bold> What type of taste among sweetness, sourness, bitterness, and saltiness do you think matches the VR environment the most? You can also choose none if you cannot come up with an association.</p>
</list-item>
<list-item>
<p>&#x2022; <bold>Q2-3.</bold> Do you have some scenarios in your mind when coming up with this association when answering the second question? You should first answer yes or no, and then provide a specific answer if you say yes.</p>
</list-item>
<list-item>
<p>&#x2022; <bold>Q2-4.</bold> How much do you like the just-finished scene when drinking the beverage? Ratings are from 1 (dislike it very much) to 7 (like it very much).</p>
</list-item>
</list>
</p>
<fig id="F6" position="float">
<label>FIGURE 6</label>
<caption>
<p>Overview of experiment 2.</p>
</caption>
<graphic xlink:href="frvir-03-1056782-g006.tif"/>
</fig>
<p>Before going to the next ambient color room, they were exposed to a white room and delivered a mini cup of pure water for palate cleansing. After the participants experienced all eight conditions, they were asked to take the VR HMD off, and answer a post-experiment questionnaire to collect their demographic information.</p>
</sec>
<sec id="s4-5">
<title>4.5 Procedure</title>
<p>The participants were first briefed about the task and procedure of the experiment. They were encouraged to raise any questions if something was unclear. They were also informed that they could stop at any point during the experiment if they felt uncomfortable. The participants then put the VR HMD on and were asked not to take it off before the end of the experiment. Thus, they could not see and did not receive any information about the drinks during the whole experiment. They were first exposed to a white room and were given a cup of pure water to accommodate the &#x2018;blinded&#x2019; drinking action and palate cleansing. After that, they were given the mini cup containing the tastant and kept it in their hand. Next, the experimenter switched the scene to the first room to start the formal experiment. After the participants were immersed in the ambient color virtual environment for 30&#xa0;s, a background voice reminded the participant to start drinking. After they finished drinking, participants were asked to answer the above questions orally. Before going to the next ambient color room, they were exposed to a white room and the cups in their hands were replaced with new ones containing pure water. They then drank the pure water for palate cleansing. After that, they entered the next ambient color room with a new mini cup of tastant delivered, replacing the cup containing pure water. Note that, when immersed in the virtual environment without seeing the content in the physical world, most of the participants were unaware that they were drinking the same tastants throughout the study.</p>
<p>After the participants experienced all eight conditions, they were asked to take the VR HMD off, drink the taste stimuli once more, and then report their feelings. Finally, we gave participants a post-experiment questionnaire to collect their demographic information, past VR experience, and beverage consumption habits.</p>
</sec>
<sec id="s4-6">
<title>4.6 Results</title>
<p>SPSS (version 26) was used for data analysis. The results were transformed with Aligned Rank Transform (<xref ref-type="bibr" rid="B68">Wobbrock et al., 2011</xref>). We performed one-way Repeated Measure-(RM-) ANOVA with the transformed data for <bold>Q2-1</bold> and <bold>Q2-4</bold>. If the transformed results violated the assumption of sphericity, we report the results with Greenhouse-Geisser or Huynh-Feldt corrections according to the case (<italic>&#x3f5;</italic> &#x3c; 0.75 or <inline-formula id="inf1">
<mml:math id="m1">
<mml:mo>&#x3e;</mml:mo>
<mml:mn>0.75</mml:mn>
</mml:math>
</inline-formula>, respectively). We used Fisher&#x2019;s exact test to examine whether there was a significant color-taste association (i.e., <bold>Q2-2</bold>). Post-hoc pairwise comparisons were run with Bonferroni corrections if the above tests revealed significant differences or associations.</p>
<sec id="s4-6-1">
<title>4.6.1 Q2-1. Perceived taste of the drink</title>
<p>
<bold>Q2-1</bold> refers to the influence on real taste. <xref ref-type="fig" rid="F7">Figure 7</xref> summarizes the descriptive statistics and the distribution of the ratings of <bold>Q2-1</bold>. As can be seen from the figure, the VR room with pink ambient color (AC) was on average rated the highest for sweet taste (<italic>M</italic> &#x3d; 2.500, SD &#x3d; 1.119) among eight <italic>AC</italic> conditions. For sour taste, the VR room with green <italic>AC</italic> reached the highest average rating (<italic>M</italic> &#x3d; 2.312, SD &#x3d; 1.310). Similarly, bitter taste in this <italic>AC</italic> environment was also rated the highest (<italic>M</italic> &#x3d; 3.250, SD &#x3d; 1.436). Participants did not report a strong feeling of salty taste. The ratings on saltiness were all low, but the VR room with black <italic>AC</italic> reached the highest average score of salty (<italic>M</italic> &#x3d; 1.688, SD &#x3d; 1.158).</p>
<fig id="F7" position="float">
<label>FIGURE 7</label>
<caption>
<p>The descriptive statistics and the distribution of the results of taste rating (<bold>Q2-1</bold>). The dots, triangles, lines and diamonds represent participants&#x2019; ratings, average ratings, median and outliers, respectively.</p>
</caption>
<graphic xlink:href="frvir-03-1056782-g007.tif"/>
</fig>
<p>RM-ANOVA tests revealed a significant main effect of <italic>AC</italic> on the ratings of sweet taste <inline-formula id="inf2">
<mml:math id="m2">
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:mi>F</mml:mi>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:mn>7,105</mml:mn>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>2.296</mml:mn>
<mml:mo>,</mml:mo>
<mml:mspace width="1em"/>
<mml:mi>p</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>0.032</mml:mn>
<mml:mo>,</mml:mo>
<mml:msubsup>
<mml:mrow>
<mml:mi>&#x3b7;</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>p</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mn>2</mml:mn>
</mml:mrow>
</mml:msubsup>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>0.133</mml:mn>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:math>
</inline-formula>, but not on sour taste <inline-formula id="inf3">
<mml:math id="m3">
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:mi>F</mml:mi>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:mn>7,105</mml:mn>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>1.547</mml:mn>
<mml:mo>,</mml:mo>
<mml:mspace width="1em"/>
<mml:mi>p</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>0.159</mml:mn>
<mml:mo>,</mml:mo>
<mml:msubsup>
<mml:mrow>
<mml:mi>&#x3b7;</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>p</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mn>2</mml:mn>
</mml:mrow>
</mml:msubsup>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>0.094</mml:mn>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:math>
</inline-formula>, bitter taste <inline-formula id="inf4">
<mml:math id="m4">
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:mi>F</mml:mi>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:mn>7,105</mml:mn>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>1.106</mml:mn>
<mml:mo>,</mml:mo>
<mml:mspace width="1em"/>
<mml:mi>p</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>0.365</mml:mn>
<mml:mo>,</mml:mo>
<mml:msubsup>
<mml:mrow>
<mml:mi>&#x3b7;</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>p</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mn>2</mml:mn>
</mml:mrow>
</mml:msubsup>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>0.069</mml:mn>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:math>
</inline-formula>, or salty taste <inline-formula id="inf5">
<mml:math id="m5">
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:mi>F</mml:mi>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:mn>3.716</mml:mn>
<mml:mo>,</mml:mo>
<mml:mn>55.736</mml:mn>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>0.957</mml:mn>
<mml:mo>,</mml:mo>
<mml:mspace width="1em"/>
<mml:mi>p</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>0.466</mml:mn>
<mml:mo>,</mml:mo>
<mml:msubsup>
<mml:mrow>
<mml:mi>&#x3b7;</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>p</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mn>2</mml:mn>
</mml:mrow>
</mml:msubsup>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>0.060</mml:mn>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:math>
</inline-formula>. We further performed a post-hoc analysis with the ratings of sweet taste. However, the pairwise comparison did not show significant differences in the ratings of sweet taste in the different <italic>AC</italic> conditions.</p>
</sec>
<sec id="s4-6-2">
<title>4.6.2 Q2-2. Color-taste associations</title>
<p>
<bold>Q2-2</bold> focuses on scenario-taste matching. <xref ref-type="fig" rid="F8">Figure 8</xref> shows the frequencies of taste matching for different ambient color environments. Results of Fisher&#x2019;s exact test show that there was a significant association between color and taste (<italic>&#x3c7;</italic>
<sup>2</sup> &#x3d; 87.230, <italic>p</italic> &#x3c; 0.001). The frequencies and pairwise comparisons are summarized in <xref ref-type="table" rid="T2">Table 2</xref>. Black and green showed a significant association with bitter taste compared with sweet. Pink and purple presented a significant association with sweetness rather than bitterness. No significant difference was observed in the other colors. According to the frequencies, sour was associated more frequently with yellow, while salty was associated more frequently with blue.</p>
<fig id="F8" position="float">
<label>FIGURE 8</label>
<caption>
<p>The frequencies and pairwise comparisons of the answers to <bold>Q2-2</bold> in Experiment 2. The colors in the figure correspond to the ambient colors in the virtual environments.</p>
</caption>
<graphic xlink:href="frvir-03-1056782-g008.tif"/>
</fig>
<table-wrap id="T2" position="float">
<label>TABLE 2</label>
<caption>
<p>The frequencies and pairwise comparisons of the answers to Q2-2 in Experiment 2. The values in bold indicate the most frequently selected taste. Each subscript letter denotes a subset of color categories whose proportions do not differ significantly from each other at the 0.05 level reading across a row.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="left"/>
<th align="left">Sweet</th>
<th align="left">Sour</th>
<th align="left">Bitter</th>
<th align="left">Salty</th>
<th align="left">None</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="left">black</td>
<td align="left">0<sub>c</sub>
</td>
<td align="left">0 <sub>a, c</sub>
</td>
<td align="left">
<bold>8</bold>
<sub>
<bold>a, b</bold>
</sub>
</td>
<td align="left">2<sub>a, b, c</sub>
</td>
<td align="left">6<sub>b</sub>
</td>
</tr>
<tr>
<td align="left">blue</td>
<td align="left">0<sub>b</sub>
</td>
<td align="left">3<sub>a, b</sub>
</td>
<td align="left">
<bold>7</bold>
<sub>
<bold>a</bold>
</sub>
</td>
<td align="left">3<sub>a</sub>
</td>
<td align="left">3<sub>a</sub>
</td>
</tr>
<tr>
<td align="left">green</td>
<td align="left">2<sub>b</sub>
</td>
<td align="left">3<sub>a, b</sub>
</td>
<td align="left">10<sub>
<bold>a</bold>
</sub>
</td>
<td align="left">1<sub>a, b</sub>
</td>
<td align="left">0<sub>a, b</sub>
</td>
</tr>
<tr>
<td align="left">orange</td>
<td align="left">
<bold>11</bold>
<sub>
<bold>a</bold>
</sub>
</td>
<td align="left">1<sub>a</sub>
</td>
<td align="left">2<sub>a</sub>
</td>
<td align="left">2<sub>a</sub>
</td>
<td align="left">0<sub>a, b</sub>
</td>
</tr>
<tr>
<td align="left">pink</td>
<td align="left">
<bold>13</bold>
<sub>
<bold>b</bold>
</sub>
</td>
<td align="left">3<sub>a, b</sub>
</td>
<td align="left">0<sub>a</sub>
</td>
<td align="left">0<sub>a, b</sub>
</td>
<td align="left">0<sub>a, b</sub>
</td>
</tr>
<tr>
<td align="left">purple</td>
<td align="left">
<bold>9</bold>
<sub>
<bold>b</bold>
</sub>
</td>
<td align="left">4<sub>a, b</sub>
</td>
<td align="left">0<sub>a</sub>
</td>
<td align="left">1<sub>a, b</sub>
</td>
<td align="left">2<sub>a, b</sub>
</td>
</tr>
<tr>
<td align="left">red</td>
<td align="left">
<bold>5</bold>
<sub>
<bold>a</bold>
</sub>
</td>
<td align="left">
<bold>5</bold>
<sub>
<bold>a</bold>
</sub>
</td>
<td align="left">2<sub>a</sub>
</td>
<td align="left">1<sub>a</sub>
</td>
<td align="left">3<sub>a</sub>
</td>
</tr>
<tr>
<td align="left">yellow</td>
<td align="left">2<sub>a</sub>
</td>
<td align="left">
<bold>6</bold>
<sub>
<bold>a</bold>
</sub>
</td>
<td align="left">
<bold>6</bold>
<sub>
<bold>a</bold>
</sub>
</td>
<td align="left">2<sub>a</sub>
</td>
<td align="left">0<sub>a</sub>
</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s4-6-3">
<title>4.6.3 Q2-3. Association of VR environment to real scenarios</title>
<p>
<xref ref-type="fig" rid="F9">Figure 9</xref> shows the participants&#x2019; reported real scenarios associated with the ambient light virtual environments, after matching them with the taste. Their answers were listed on the right side, with lines in colors indicating the association was under which ambient color virtual environment. Overall, there were three types of answers: scenes, food, and feelings. There were both answers related to foodstuff and non-foodstuff. Most participants associated sweet taste with food-related items. For example, after being exposed to the pink ambient light in the virtual environment, some participants came up with &#x201c;candy&#x201d; in their minds. Other common answers were lemon-yellow ambient light, apple-green ambient light, and oranges-orange ambient light. On the contrary, participants came up with specific scenes other than food (e.g., sea and night) after matching the ambient light with bitterness. Regarding the success of coming up with scenarios in their minds, associated scenarios were frequently described after being matched with sweetness (43, 21, 29, and 5 for the number of answers in sweet, sour, bitter, and salty taste correspondingly). Few participants described scenarios after matching the ambient color environment with the salty taste.</p>
<fig id="F9" position="float">
<label>FIGURE 9</label>
<caption>
<p>General associated objects and scenes under different taste choice (<bold>Q2-3</bold>). Participants gave the answers after matching the ambient color with the taste. Answers of associated scenario were categorized into food and non-food and listed in the right side. Lines were colored for indicating what ambient colors the answers were based on. Notice that, not all the conditions have associated scenarios.</p>
</caption>
<graphic xlink:href="frvir-03-1056782-g009.tif"/>
</fig>
</sec>
<sec id="s4-6-4">
<title>4.6.4 Q2-4. Degree of liking of ambient colors</title>
<p>
<xref ref-type="fig" rid="F10">Figure 10</xref> provides an overview of the participants&#x2019; answers to <bold>Q2-4</bold>. It can be seen from the figure that participants preferred orange and pink environments. Black, green, and red ambient environments were disliked by some participants. There was a significant main effect on the ratings in their preferred VR room (<italic>F</italic> (7, 105) &#x3d; 2.463, <italic>p</italic> &#x3d; 0.022). Post-hoc pairwise comparisons showed that ratings of orange (<italic>M</italic> &#x3d; 4.313, SD &#x3d; 1.158) were significantly higher than red (<italic>M</italic> &#x3d; 3.000, SD &#x3d; 1.173; <italic>p</italic> &#x3d; 0.024, adjusted).</p>
<fig id="F10" position="float">
<label>FIGURE 10</label>
<caption>
<p>The descriptive statistics and the distribution of the results of color environment liking (<bold>Q2-4</bold>). The dots, triangles, lines and diamonds represent participants&#x2019; ratings, average ratings, median and outliers, respectively. Asterisks mark the comparison that differed significantly (<italic>p</italic> &#x3c; 0.05) in the post-hoc pairwise comparison.</p>
</caption>
<graphic xlink:href="frvir-03-1056782-g010.tif"/>
</fig>
</sec>
</sec>
</sec>
<sec sec-type="discussion" id="s5">
<title>5 Discussion</title>
<sec id="s5-1">
<title>5.1 Comparison with related work</title>
<p>In this work, we focused on exploring how environmental colors would affect people&#x2019;s taste perception in VR rather than verifying a psychological phenomenon in VR. We used VR as a platform for providing new perceptions and a higher level of immersion to study the cross-modal correspondence (CMC) experience.</p>
<p>As listed in the Related Work (<xref ref-type="sec" rid="s2">Section 2</xref>), our research can be positioned differently from prior work in physical and VR settings. Our results in taste association present an interesting comparison with the results given by studies conducted using a desktop computer screen as a medium for showing colors (<xref ref-type="bibr" rid="B64">Wan et al., 2014</xref>). Pink was significantly associated with sweetness in both VR and the real world. However, different from <xref ref-type="bibr" rid="B64">Wan et al. (2014)</xref>&#x2019;s results from experiments in the real world where green was found to be more associated with sourness and black with a bitter taste, our results in VR showed that green can also be associated with a bitter taste. This indicates that the same color studied <italic>via</italic> different mediums may bring the same or different results in taste perception.</p>
<p>Unlike prior work regarding taste-related CMC in virtual environments, our selection of ambient colors as visual stimuli is novel and has not been explored systematically before. Choosing a parameter that is not related to the environment (e.g., color change of the foodstuff, change of the container shape) may lose the benefit of the immersion provided by VR headsets. In addition, our focus is on the basic aspects of VR rather than the complex ones. The color of ambient light suits these requirements. Our work is different from <xref ref-type="bibr" rid="B7">Chen et al. (2020)</xref> which used VR environments involving a diversity of visual elements, surface textures, and types of rooms. Moreover, while the experiment by <xref ref-type="bibr" rid="B66">Wang et al. (2020)</xref> and <xref ref-type="bibr" rid="B25">Huang et al. (2019)</xref> focused on changing the color of the drink in VR (i.e., an intrinsic property of the drinks), our study dealt with the ambient color of the VR environment (i.e., an extrinsic property of the drinks). In our experiment, the sweetness of the beverage seems to be increased by the pink environment, and the orange VR environment was preferable. These findings can be compared with <xref ref-type="bibr" rid="B66">Wang et al. (2020)</xref>&#x2019;s results showing that the intrinsic beverage color did not influence the perceived sweetness or liking of the beverage.</p>
</sec>
<sec id="s5-2">
<title>5.2 Conceptual model and emotional valence under the ambient colors</title>
<p>Participants&#x2019; answer to the associated scene in the VR environment shows that most participants&#x2019; color-taste associations were based on specific scenarios. This mainly includes foodstuff and non-food-related scenarios. According to the explanation by <xref ref-type="bibr" rid="B54">Spence (2019b)</xref> (see <xref ref-type="sec" rid="s2-2">Section 2.2</xref>), people can often point to specific <italic>source object(s)</italic> that embody both color and taste, which is also in line with the associated scenarios regarding foodstuff in our experiment. The ambient color may trigger participants&#x2019; recall of real-life experiences with food, and then the association with taste would come up based on these experiences. The variety of answers indicates the association may be personal and subjective, which may also be a topic for further investigation.</p>
<p>Some answers about scenarios other than foodstuff could be explained by <italic>emotional mediation</italic> (<xref ref-type="bibr" rid="B51">Spence, 2011</xref>) (see <xref ref-type="sec" rid="s2-2">Section 2.2</xref>) of the relation between taste and the words regarding emotion in a particular culture. Categorized by taste association, some answers such as &#x201c;chemical poison&#x201d;, &#x201c;nursing house&#x201d;, &#x201c;horror movie&#x201d; appeared under bitter association while &#x201c;vineyard&#x201d;, &#x201c;club&#x201d;, &#x201c;children&#x2019;s room&#x201d; appeared under sweet association. When categorized by color, there were some negative words like &#x201c;prison&#x201d; for the black virtual environment, &#x201c;horror&#x201d; for the red virtual environment, and &#x201c;depressed&#x201d; for the green virtual environment. This kind of negative association observed with certain ambient colors may be in line with some recent studies that have explored negative emotions in VR (<xref ref-type="bibr" rid="B33">Lavoie et al., 2021</xref>; <xref ref-type="bibr" rid="B38">Magdin et al., 2021</xref>). These studies showed that virtual environments involving a higher level of absorption may increase negative emotional responses. Also, <xref ref-type="bibr" rid="B15">Elliot et al. (2007)</xref> suggested that attention must be paid to how color can act as a subtle environmental cue that has important influences on human behavior. Regarding color study in virtual environments, research work has shown that the context in a virtual environment where the ambient color was assigned can also influence emotion and valence (<xref ref-type="bibr" rid="B36">Lipson-Smith et al., 2020</xref>). This leads to a future question to be discussed since, in this experiment, we did not include 3D models in the scenes. 
Based on this, we suggest that future researchers or designers applying ambient color in VR systems can pay special attention to the emotional association the color can trigger.</p>
</sec>
<sec id="s5-3">
<title>5.3 Future applications</title>
<p>Our experiment results could contribute to the design of human-food applications, presenting the possibility of how the dieting experience can be altered by using specific environmental colors. Researchers have looked into the use of diet activities as positive psychological interventions to increase everyday happiness and well-being (<xref ref-type="bibr" rid="B17">Fischler, 2011</xref>; <xref ref-type="bibr" rid="B9">Cook et al., 2020</xref>). Moreover, existing works have explored what and how advanced technology can support healthy dieting using wearable devices and mobile apps (<xref ref-type="bibr" rid="B14">Dong and Biswas, 2013</xref>; <xref ref-type="bibr" rid="B44">Pan et al., 2019</xref>). Our work shows the possibility of using VR to change people&#x2019;s taste perception, especially the sensation of sweet taste. Given the health issues caused by high levels of sugary drink intake (<xref ref-type="bibr" rid="B16">Essman et al., 2021</xref>; <xref ref-type="bibr" rid="B35">Leung et al., 2021</xref>), our findings could have applications to support healthier drinking habits. We found that modifying the ambient color in a VR environment can enhance users&#x2019; sensation of the sweet taste of the beverage. This approach provides a novel beverage-drinking experience and a cost-effective approach to changing users&#x2019; diet behavior (by enhancing users&#x2019; perception of sweetness in drinks that are lower in actual sugar content). Future research could look into this potential application <italic>via</italic> longitudinal experiments.</p>
</sec>
</sec>
<sec id="s6">
<title>6 Limitations and future work</title>
<p>The first limitation of this research is that our sample population is limited to university students within a similar age group and is relatively small. To examine the topic further, a larger sample size including different age groups may be helpful taking our experiment framework as a reference. Also, in our experiments, participants did the taste ratings after receiving the stimuli at a consistent time interval. Further work is needed to check whether the same results will be obtained and whether other insights will be found when we extend or shorten the exposure time of the ambient color. Moreover, the aroma of the tea can be crucial for influencing the taste and could represent a bias for studying taste perception. However, our focus was the influence of ambient light on basic tastes, and we did not want to introduce the aroma of tea as another variable. We tried to reduce the influence of tea aroma on taste perception by covering the cups with lids while participants held them in their hands. Despite these limitations, we believe that our work contributes to the initial direction and provides a solid foundation on top of which further research could be conducted to better understand CMC, especially the ones evoked by ambient color in virtual environments.</p>
<p>Thinking more broadly, the virtual environment can be rendered in various ways and the VR HMD we used in this experiment is just one of them. In other types of virtual environments, including CAVE, the screen-based interface can also be taken into account to see if there is a difference in taste perception or not. Moreover, the only beverage we used was a neutral drink. In the future, it could be interesting to use several types of beverages and explore whether for example using beverages with a strong taste can have a greater or lesser effect.</p>
</sec>
<sec sec-type="conclusion" id="s7">
<title>7 Conclusion</title>
<p>Our research provides a solid example of using a Virtual Reality (VR) environment as an experimental platform to study the cross-modal correspondence between ambient color and taste perception. Results from the first experiment suggest the need for paying attention to the visually induced motion sickness and color recognition of people when doing color-related experiments in VR environments. Results from the second experiment demonstrated that the pink ambient color could change people&#x2019;s perception of the sweet taste of a neutral drink. The black and green ambient colors are associated with bitterness, while pink and purple are linked with sweetness. Moreover, the orange ambient color is preferable for drinking beverages in VR. Our research fills a gap in cross-modal correspondence targeting ambient color in VR, providing helpful and practical insights that can be applied to human-food interaction and VR applications that involve socialization and healthy habit promotion. Its results can also form the basis for further research.</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="s8">
<title>Data availability statement</title>
<p>The raw data supporting the conclusion of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="s9">
<title>Ethics statement</title>
<p>The studies involving human participants were reviewed and approved by University Research Committee. The patients/participants provided their written informed consent to participate in this study.</p>
</sec>
<sec id="s10">
<title>Author contributions</title>
<p>All authors listed have made a substantial, direct, and intellectual contribution to the work and approved it for publication. ZW and MJ conducted this work when they were affiliated with Xi&#x2019;an Jiaotong-Liverpool University.</p>
</sec>
<sec id="s11">
<title>Funding</title>
<p>This work is funded in part by Xi&#x2019;an Jiaotong-Liverpool University (XJTLU) Key Special Fund (KSF-A-03) and XJTLU Research Development Fund (RDF-17-01-54), and the Natural Science Foundation of the Jiangsu Higher Education Institutions of China (22KJB520038).</p>
</sec>
<ack>
<p>The authors want to thank the participants for volunteering their time to join the experiments. We also thank the reviewers for the insightful comments and suggestions that helped improve our paper.</p>
</ack>
<sec sec-type="COI-statement" id="s12">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="s13">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<fn-group>
<fn id="fn2">
<label>1</label>
<p>Umami is a category of taste in food (besides sweet, sour, salt, and bitter), corresponding to the flavor of glutamates, especially monosodium glutamate.</p>
</fn>
<fn id="fn3">
<label>2</label>
<p>
<ext-link ext-link-type="uri" xlink:href="https://hello.vrchat.com/">https://hello.vrchat.com/</ext-link>.</p>
</fn>
<fn id="fn4">
<label>3</label>
<p>
<ext-link ext-link-type="uri" xlink:href="https://www.oculus.com/workrooms/">https://www.oculus.com/workrooms/</ext-link>.</p>
</fn>
</fn-group>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Arnold</surname>
<given-names>P.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>You better eat to survive! exploring edible interactions in a virtual reality game</article-title>. <source>Proc. 2017 CHI Conf. Ext. Abstr. Hum. Factors Comput. Syst.</source> <volume>&#x2019;17</volume>, <fpage>206</fpage>&#x2013;<lpage>209</lpage>. <pub-id pub-id-type="doi">10.1145/3027063.3048408</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bangcuyo</surname>
<given-names>R. G.</given-names>
</name>
<name>
<surname>Smith</surname>
<given-names>K. J.</given-names>
</name>
<name>
<surname>Zumach</surname>
<given-names>J. L.</given-names>
</name>
<name>
<surname>Pierce</surname>
<given-names>A. M.</given-names>
</name>
<name>
<surname>Guttman</surname>
<given-names>G. A.</given-names>
</name>
<name>
<surname>Simons</surname>
<given-names>C. T.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>The use of immersive technologies to improve consumer testing: The role of ecological validity, context and engagement in evaluating coffee</article-title>. <source>Food Qual. Prefer.</source> <volume>41</volume>, <fpage>84</fpage>&#x2013;<lpage>95</lpage>. <pub-id pub-id-type="doi">10.1016/j.foodqual.2014.11.017</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bonato</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Bubka</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Alfieri</surname>
<given-names>L.</given-names>
</name>
</person-group> (<year>2004</year>). <article-title>Display color affects motion sickness symptoms in an optokinetic drum</article-title>. <source>Aviat. Space Environ. Med.</source> <volume>75</volume>, <fpage>306</fpage>&#x2013;<lpage>311</lpage>.</citation>
</ref>
<ref id="B4">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Cardello</surname>
<given-names>A. V.</given-names>
</name>
</person-group> (<year>1994</year>). <source>Consumer expectations and their role in food acceptance</source>. <publisher-loc>Boston, MA</publisher-loc>: <publisher-name>Springer US</publisher-name>, <fpage>253</fpage>&#x2013;<lpage>297</lpage>. <pub-id pub-id-type="doi">10.1007/978-1-4615-2171-6_10</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Carlson</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Kirpes</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Pavlik</surname>
<given-names>R. A.</given-names>
</name>
<name>
<surname>Vance</surname>
<given-names>J. M.</given-names>
</name>
<name>
<surname>Yin</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Scott-Cooper</surname>
<given-names>T.</given-names>
</name>
<etal/>
</person-group> (<year>2011</year>). &#x201c;<article-title>Comparison of single-wall versus multi-wall immersive environments to support a virtual shopping experience</article-title>,&#x201d; in <conf-name>ASME 2011 World Conference on Innovative Virtual Reality</conf-name>, <conf-loc>Milan, Italy</conf-loc>, <conf-date>June 27&#x2013;29, 2011</conf-date> (<publisher-name>World Conference on Innovative Virtual Reality</publisher-name>), <fpage>287</fpage>&#x2013;<lpage>291</lpage>. <pub-id pub-id-type="doi">10.1115/WINVR2011-5582</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Charles</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Carmel</surname>
<given-names>A., L.</given-names>
</name>
<name>
<surname>Maya</surname>
<given-names>U. S.</given-names>
</name>
<name>
<surname>Massimiliano</surname>
<given-names>Z.</given-names>
</name>
</person-group> (<year>2010</year>). <article-title>Does food color influence taste and flavor perception in humans?</article-title> <source>Chemosens. Percept.</source> <volume>3</volume>, <fpage>68</fpage>&#x2013;<lpage>84</lpage>. <pub-id pub-id-type="doi">10.1007/s12078-010-9067-z</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chen</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Huang</surname>
<given-names>A. X.</given-names>
</name>
<name>
<surname>Faber</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Makransky</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Perez-Cueto</surname>
<given-names>F. J. A.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Assessing the influence of visual-taste congruency on perceived sweetness and product liking in immersive vr</article-title>. <source>Foods</source> <volume>9</volume>, <fpage>465</fpage>. <pub-id pub-id-type="doi">10.3390/foods9040465</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Clydesdale</surname>
<given-names>F. M.</given-names>
</name>
<name>
<surname>Gover</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Fugardi</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>1992</year>). <article-title>The effect of color on thirst quenching, sweetness, acceptability and flavor intensity in fruit punch flavored beverages</article-title>. <source>J. Food Qual.</source> <volume>15</volume>, <fpage>19</fpage>&#x2013;<lpage>38</lpage>. <pub-id pub-id-type="doi">10.1111/j.1745-4557.1992.tb00973.x</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cook</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Kuntsche</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Labhart</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Kuntsche</surname>
<given-names>E.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Do different drinks make you feel different emotions? Examination of young adolescents&#x2019; beverage-specific alcohol expectancies using the alcohol expectancy task</article-title>. <source>Addict. Behav.</source> <volume>106</volume>, <fpage>106375</fpage>. <pub-id pub-id-type="doi">10.1016/j.addbeh.2020.106375</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Crisinel</surname>
<given-names>A.-S.</given-names>
</name>
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2010</year>). <article-title>As bitter as a trombone: Synesthetic correspondences in nonsynesthetes between tastes/flavors and musical notes</article-title>. <source>Atten. Percept. Psychophys.</source> <volume>72</volume>, <fpage>1994</fpage>&#x2013;<lpage>2002</lpage>. <pub-id pub-id-type="doi">10.3758/APP.72.7.1994</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Crisinel</surname>
<given-names>A.-S.</given-names>
</name>
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2009</year>). <article-title>Implicit association between basic tastes and pitch</article-title>. <source>Neurosci. Lett.</source> <volume>464</volume>, <fpage>39</fpage>&#x2013;<lpage>42</lpage>. <pub-id pub-id-type="doi">10.1016/j.neulet.2009.08.016</pub-id>
</citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Crofton</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Murray</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Botinestean</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Exploring the effects of immersive virtual reality environments on sensory perception of beef steaks and chocolate</article-title>. <source>Foods</source> <volume>10</volume>, <fpage>1154</fpage>. <pub-id pub-id-type="doi">10.3390/foods10061154</pub-id>
</citation>
</ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Delarue</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Brasset</surname>
<given-names>A.-C.</given-names>
</name>
<name>
<surname>Jarrot</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Abiven</surname>
<given-names>F.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Taking control of product testing context thanks to a multi-sensory immersive room. a case study on alcohol-free beer</article-title>. <source>Food Qual. Prefer.</source> <volume>75</volume>, <fpage>78</fpage>&#x2013;<lpage>86</lpage>. <pub-id pub-id-type="doi">10.1016/j.foodqual.2019.02.012</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Dong</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Biswas</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2013</year>). &#x201c;<article-title>Wearable diet monitoring through breathing signal analysis</article-title>,&#x201d; in <conf-name>2013 35th Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC)</conf-name>, <conf-loc>Osaka, Japan</conf-loc>, <conf-date>03-07 July 2013</conf-date>, <fpage>1186</fpage>&#x2013;<lpage>1189</lpage>. <pub-id pub-id-type="doi">10.1109/EMBC.2013.6609718</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Elliot</surname>
<given-names>A. J.</given-names>
</name>
<name>
<surname>Maier</surname>
<given-names>M. A.</given-names>
</name>
<name>
<surname>Moller</surname>
<given-names>A. C.</given-names>
</name>
<name>
<surname>Friedman</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Meinhardt</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2007</year>). <article-title>Color and psychological functioning: The effect of red on performance attainment</article-title>. <source>J. Exp. Psychol. General</source> <volume>136</volume>, <fpage>154</fpage>&#x2013;<lpage>168</lpage>. <pub-id pub-id-type="doi">10.1037/0096-3445.136.1.154</pub-id>
</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Essman</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Stoltze</surname>
<given-names>F. M.</given-names>
</name>
<name>
<surname>Carpentier</surname>
<given-names>F. D.</given-names>
</name>
<name>
<surname>Swart</surname>
<given-names>E. C.</given-names>
</name>
<name>
<surname>Taillie</surname>
<given-names>L. S.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Examining the news media reaction to a national sugary beverage tax in South Africa: A quantitative content analysis</article-title>. <source>BMC public health</source> <volume>21</volume>, <fpage>454</fpage>&#x2013;<lpage>514</lpage>. <pub-id pub-id-type="doi">10.1186/s12889-021-10460-1</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fischler</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2011</year>). <article-title>Commensality, society and culture</article-title>. <source>Soc. Sci. Inf.</source> <volume>50</volume>, <fpage>528</fpage>&#x2013;<lpage>548</lpage>. <pub-id pub-id-type="doi">10.1177/0539018411413963</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Gallace</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Ngo</surname>
<given-names>M. K.</given-names>
</name>
<name>
<surname>Sulaitis</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2012</year>). &#x201c;<article-title>Multisensory presence in virtual reality: Possibilities &#x26; limitations</article-title>,&#x201d; in <source>Multiple sensorial media advances and applications: New developments in MulSeMedia</source> (<publisher-loc>Pennsylvania, United States</publisher-loc>: <publisher-name>IGI Global</publisher-name>), <fpage>1</fpage>&#x2013;<lpage>38</lpage>. <pub-id pub-id-type="doi">10.4018/978-1-60960-821-7.ch001</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gotow</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Skrandies</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Kobayashi</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Kobayakawa</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Traditional Japanese confection overseas: Cultural difference and retronasal aroma affect flavor preference and umami perception</article-title>. <source>Food Qual. Prefer.</source> <volume>92</volume>, <fpage>104204</fpage>. <pub-id pub-id-type="doi">10.1016/j.foodqual.2021.104204</pub-id>
</citation>
</ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Grassini</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Laumann</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>de Martin Topranin</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Thorp</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Evaluating the effect of multi-sensory stimulations on simulator sickness and sense of presence during hmd-mediated vr experience</article-title>. <source>Ergonomics</source> <volume>64</volume>, <fpage>1532</fpage>&#x2013;<lpage>1542</lpage>. <pub-id pub-id-type="doi">10.1080/00140139.2021.1941279</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gusev</surname>
<given-names>D. A.</given-names>
</name>
<name>
<surname>Eschbach</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Westin</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Yong</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Motion-sickness-related aspects of inclusion of color-deficient observers in virtual reality</article-title>. <source>Int. J. Child Health Hum. Dev.</source> <volume>11</volume>, <fpage>177</fpage>&#x2013;<lpage>181</lpage>.</citation>
</ref>
<ref id="B22">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Gusev</surname>
<given-names>D. A.</given-names>
</name>
<name>
<surname>Whittinghill</surname>
<given-names>D. M.</given-names>
</name>
<name>
<surname>Yong</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2016</year>). &#x201c;<article-title>A simulator to study the effects of color and color blindness on motion sickness in virtual reality using head-mounted displays</article-title>,&#x201d; in <source>Mobile and wireless technologies 2016</source>. Editors <person-group person-group-type="editor">
<name>
<surname>Kim</surname>
<given-names>K. J.</given-names>
</name>
<name>
<surname>Wattanapongsakorn</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Joukov</surname>
<given-names>N.</given-names>
</name>
</person-group> (<publisher-loc>Singapore</publisher-loc>: <publisher-name>Springer Singapore</publisher-name>), <fpage>197</fpage>&#x2013;<lpage>204</lpage>.</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Halabi</surname>
<given-names>O.</given-names>
</name>
<name>
<surname>Saleh</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Augmented reality flavor: Cross-modal mapping across gustation, olfaction, and vision</article-title>. <source>Multimed. Tools Appl.</source> <volume>80</volume>, <fpage>36423</fpage>&#x2013;<lpage>36441</lpage>. <pub-id pub-id-type="doi">10.1007/s11042-021-11321-0</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Harrar</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Piqueras-Fiszman</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2011</year>). <article-title>There&#x2019;s more to taste in a coloured bowl</article-title>. <source>Perception</source> <volume>40</volume>, <fpage>880</fpage>&#x2013;<lpage>882</lpage>. <pub-id pub-id-type="doi">10.1068/p7040</pub-id>
</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Huang</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Huang</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Wan</surname>
<given-names>X.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Influence of virtual color on taste: Multisensory integration between virtual and real worlds</article-title>. <source>Comput. Hum. Behav.</source> <volume>95</volume>, <fpage>168</fpage>&#x2013;<lpage>174</lpage>. <pub-id pub-id-type="doi">10.1016/j.chb.2019.01.027</pub-id>
</citation>
</ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Johnson</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Clydesdale</surname>
<given-names>F. M.</given-names>
</name>
</person-group> (<year>1982</year>). <article-title>Perceived sweetness and redness in colored sucrose solutions</article-title>. <source>J. Food Sci.</source> <volume>47</volume>, <fpage>747</fpage>&#x2013;<lpage>752</lpage>. <pub-id pub-id-type="doi">10.1111/j.1365-2621.1982.tb12706.x</pub-id>
</citation>
</ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Keast</surname>
<given-names>R. S.</given-names>
</name>
<name>
<surname>Breslin</surname>
<given-names>P. A.</given-names>
</name>
</person-group> (<year>2003</year>). <article-title>An overview of binary taste&#x2013;taste interactions</article-title>. <source>Food Qual. Prefer.</source> <volume>14</volume>, <fpage>111</fpage>&#x2013;<lpage>124</lpage>. <pub-id pub-id-type="doi">10.1016/S0950-3293(02)00110-6</pub-id>
</citation>
</ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kerruish</surname>
<given-names>E.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Arranging sensations: Smell and taste in augmented and virtual reality</article-title>. <source>Senses Soc.</source> <volume>14</volume>, <fpage>31</fpage>&#x2013;<lpage>45</lpage>. <pub-id pub-id-type="doi">10.1080/17458927.2018.1556952</pub-id>
</citation>
</ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kim</surname>
<given-names>H. K.</given-names>
</name>
<name>
<surname>Park</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Choi</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Choe</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Virtual reality sickness questionnaire (vrsq): Motion sickness measurement index in a virtual reality environment</article-title>. <source>Appl. Ergon.</source> <volume>69</volume>, <fpage>66</fpage>&#x2013;<lpage>73</lpage>. <pub-id pub-id-type="doi">10.1016/j.apergo.2017.12.016</pub-id>
</citation>
</ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kn&#xf6;ferle</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Crossmodal correspondences between sounds and tastes</article-title>. <source>Psychon. Bull. Rev.</source> <volume>19</volume>, <fpage>992</fpage>&#x2013;<lpage>1006</lpage>. <pub-id pub-id-type="doi">10.3758/s13423-012-0321-z</pub-id>
</citation>
</ref>
<ref id="B31">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Koch</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Koch</surname>
<given-names>E. C.</given-names>
</name>
</person-group> (<year>2003</year>). <article-title>Preconceptions of taste based on color</article-title>. <source>J. Psychol.</source> <volume>137</volume>, <fpage>233</fpage>&#x2013;<lpage>242</lpage>. <pub-id pub-id-type="doi">10.1080/00223980309600611</pub-id>
</citation>
</ref>
<ref id="B32">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kuliga</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Thrash</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Dalton</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>H&#xf6;lscher</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Virtual reality as an empirical research tool &#x2014; Exploring user experience in a real building and a corresponding virtual model</article-title>. <source>Comput. Environ. Urban Syst.</source> <volume>54</volume>, <fpage>363</fpage>&#x2013;<lpage>375</lpage>. <pub-id pub-id-type="doi">10.1016/j.compenvurbsys.2015.09.006</pub-id>
</citation>
</ref>
<ref id="B33">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lavoie</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Main</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>King</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>King</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Virtual experience, real consequences: The potential negative emotional consequences of virtual reality gameplay</article-title>. <source>Virtual Real.</source> <volume>25</volume>, <fpage>69</fpage>&#x2013;<lpage>81</lpage>. <pub-id pub-id-type="doi">10.1007/s10055-020-00440-y</pub-id>
</citation>
</ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ledoux</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Nguyen</surname>
<given-names>A. S.</given-names>
</name>
<name>
<surname>Bakos-Block</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Bordnick</surname>
<given-names>P.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Using virtual reality to study food cravings</article-title>. <source>Appetite</source> <volume>71</volume>, <fpage>396</fpage>&#x2013;<lpage>402</lpage>. <pub-id pub-id-type="doi">10.1016/j.appet.2013.09.006</pub-id>
</citation>
</ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Leung</surname>
<given-names>C. Y.</given-names>
</name>
<name>
<surname>Abe</surname>
<given-names>S. K.</given-names>
</name>
<name>
<surname>Sawada</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Ishihara</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Takachi</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Yamaji</surname>
<given-names>T.</given-names>
</name>
<etal/>
</person-group> (<year>2021</year>). <article-title>Sugary drink consumption and risk of kidney and bladder cancer in Japanese adults</article-title>. <source>Sci. Rep.</source> <volume>11</volume>, <fpage>21701</fpage>&#x2013;<lpage>21709</lpage>. <pub-id pub-id-type="doi">10.1038/s41598-021-01103-x</pub-id>
</citation>
</ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lipson-Smith</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Bernhardt</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Zamuner</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Churilov</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Busietta</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Moratti</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Exploring colour in context using virtual reality: Does a room change how you feel?</article-title> <source>Virtual Real.</source> <volume>25</volume>, <fpage>631</fpage>&#x2013;<lpage>645</lpage>. <pub-id pub-id-type="doi">10.1007/s10055-020-00479-x</pub-id>
</citation>
</ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Maga</surname>
<given-names>J. A.</given-names>
</name>
</person-group> (<year>1974</year>). <article-title>Influence of color on taste threshold</article-title>. <source>Chem. Senses</source> <volume>1</volume>, <fpage>115</fpage>&#x2013;<lpage>119</lpage>. <pub-id pub-id-type="doi">10.1093/chemse/1.1.115</pub-id>
</citation>
</ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Magdin</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Balogh</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Reichel</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Francisti</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Koprda</surname>
<given-names>&#x160;.</given-names>
</name>
<name>
<surname>Gy&#xf6;rgy</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Automatic detection and classification of emotional states in virtual reality and standard environments (lcd): Comparing valence and arousal of induced emotions</article-title>. <source>Virtual Real.</source> <volume>25</volume>, <fpage>1029</fpage>&#x2013;<lpage>1041</lpage>. <pub-id pub-id-type="doi">10.1007/s10055-021-00506-5</pub-id>
</citation>
</ref>
<ref id="B39">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Moser</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Tscheligi</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2013</year>). &#x201c;<article-title>Playful taste interaction</article-title>,&#x201d; in <conf-name>IDC &#x2019;13: Proceedings of the 12th International Conference on Interaction Design and Children</conf-name>, <conf-loc>New York, NY</conf-loc>, <conf-date>24 June 2013</conf-date> (<publisher-loc>New York</publisher-loc>: <publisher-name>Association for Computing Machinery</publisher-name>), <fpage>340</fpage>&#x2013;<lpage>343</lpage>. <pub-id pub-id-type="doi">10.1145/2485760.2485828</pub-id>
</citation>
</ref>
<ref id="B40">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Narumi</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Nishizaka</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Kajinami</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Tanikawa</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Hirose</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2011</year>). &#x201c;<article-title>Augmented reality flavors: Gustatory display based on edible marker and cross-modal interaction</article-title>,&#x201d; in <conf-name>Proceedings of the SIGCHI Conference on Human Factors in Computing Systems</conf-name>, <conf-loc>New York, NY, USA</conf-loc>, <conf-date>07 May 2011</conf-date> (<publisher-name>Association for Computing Machinery</publisher-name>), <fpage>93</fpage>&#x2013;<lpage>102</lpage>. <comment>CHI &#x2019;11</comment>. <pub-id pub-id-type="doi">10.1145/1978942.1978957</pub-id>
</citation>
</ref>
<ref id="B41">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Nishizawa</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Jiang</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Okajima</surname>
<given-names>K.</given-names>
</name>
</person-group> (<year>2016</year>). &#x201c;<article-title>Projective-ar system for customizing the appearance and taste of food</article-title>,&#x201d; in <conf-name>Proceedings of the 2016 Workshop on Multimodal Virtual and Augmented Reality</conf-name>, <conf-loc>New York, NY, USA</conf-loc>, <conf-date>16 November 2016</conf-date> (<publisher-name>Association for Computing Machinery</publisher-name>). <comment>MVAR &#x2019;16</comment>. <pub-id pub-id-type="doi">10.1145/3001959.3001966</pub-id>
</citation>
</ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Oberfeld</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Hecht</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Allendorf</surname>
<given-names>U.</given-names>
</name>
<name>
<surname>Wickelmaier</surname>
<given-names>F.</given-names>
</name>
</person-group> (<year>2009</year>). <article-title>Ambient lighting modifies the flavor of wine</article-title>. <source>J. Sens. Stud.</source> <volume>24</volume>, <fpage>797</fpage>&#x2013;<lpage>832</lpage>. <pub-id pub-id-type="doi">10.1111/j.1745-459X.2009.00239.x</pub-id>
</citation>
</ref>
<ref id="B43">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Oberfeld</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Hecht</surname>
<given-names>H.</given-names>
</name>
</person-group> (<year>2011</year>). <article-title>Fashion versus perception: The impact of surface lightness on the perceived dimensions of interior space</article-title>. <source>Hum. Factors</source> <volume>53</volume>, <fpage>284</fpage>&#x2013;<lpage>298</lpage>. <pub-id pub-id-type="doi">10.1177/0018720811407331</pub-id>
</citation>
</ref>
<ref id="B44">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Pan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Ren</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Brombacher</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Vos</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2019</year>). &#x201c;<article-title>Designing technology to encourage healthy eating at work</article-title>,&#x201d; in <conf-name>Proceedings of the 9th International Conference on Digital Public Health</conf-name>, <conf-loc>New York, NY, USA</conf-loc>, <conf-date>20 November 2019</conf-date> (<publisher-name>Association for Computing Machinery</publisher-name>), <fpage>131</fpage>. <comment>DPH2019</comment>. <pub-id pub-id-type="doi">10.1145/3357729.3357759</pub-id>
</citation>
</ref>
<ref id="B45">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Parise</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2013</year>). &#x201c;<article-title>Audiovisual cross-modal correspondences in the general population</article-title>,&#x201d; in <source>The Oxford handbook of synesthesia</source> (<publisher-loc>Oxford, United Kingdom</publisher-loc>: <publisher-name>Oxford University Press</publisher-name>).</citation>
</ref>
<ref id="B46">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Piqueras-Fiszman</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Colour, pleasantness, and consumption behaviour within a meal</article-title>. <source>Appetite</source> <volume>75</volume>, <fpage>165</fpage>&#x2013;<lpage>172</lpage>. <pub-id pub-id-type="doi">10.1016/j.appet.2014.01.004</pub-id>
</citation>
</ref>
<ref id="B47">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Piqueras-Fiszman</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>The influence of the color of the cup on consumers&#x2019; perception of a hot beverage</article-title>. <source>J. Sens. Stud.</source> <volume>27</volume>, <fpage>324</fpage>&#x2013;<lpage>331</lpage>. <pub-id pub-id-type="doi">10.1111/j.1745-459X.2012.00397.x</pub-id>
</citation>
</ref>
<ref id="B48">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Slater</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Wilbur</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>1997</year>). <article-title>A framework for immersive virtual environments (five): Speculations on the role of presence in virtual environments</article-title>. <source>Presence. (Camb).</source> <volume>6</volume>, <fpage>603</fpage>&#x2013;<lpage>616</lpage>. <pub-id pub-id-type="doi">10.1162/pres.1997.6.6.603</pub-id>
</citation>
</ref>
<ref id="B49">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>So</surname>
<given-names>R. H.</given-names>
</name>
<name>
<surname>Yuen</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2007</year>). &#x201c;<article-title>Comparing symptoms of visually induced motion sickness among viewers of four similar virtual environments with different color</article-title>,&#x201d; in <source>Virtual reality</source> (<publisher-loc>Berlin, Germany</publisher-loc>: <publisher-name>Springer Berlin Heidelberg</publisher-name>), <fpage>386</fpage>&#x2013;<lpage>391</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-540-73335-5_42</pub-id>
</citation>
</ref>
<ref id="B50">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Carvalho</surname>
<given-names>F. M.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>The coffee drinking experience: Product extrinsic (atmospheric) influences on taste and choice</article-title>. <source>Food Qual. Prefer.</source> <volume>80</volume>, <fpage>103802</fpage>. <pub-id pub-id-type="doi">10.1016/j.foodqual.2019.103802</pub-id>
</citation>
</ref>
<ref id="B51">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2011</year>). <article-title>Crossmodal correspondences: A tutorial review</article-title>. <source>Atten. Percept. Psychophys.</source> <volume>73</volume>, <fpage>971</fpage>&#x2013;<lpage>995</lpage>. <pub-id pub-id-type="doi">10.3758/s13414-010-0073-7</pub-id>
</citation>
</ref>
<ref id="B52">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Levitan</surname>
<given-names>C. A.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Explaining crossmodal correspondences between colours and tastes</article-title>. <source>i-Perception</source> <volume>12</volume>, <fpage>204166952110182</fpage>. <pub-id pub-id-type="doi">10.1177/20416695211018223</pub-id>
</citation>
</ref>
<ref id="B53">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2019a</year>). <article-title>Multisensory experiential wine marketing</article-title>. <source>Food Qual. Prefer.</source> <volume>71</volume>, <fpage>106</fpage>&#x2013;<lpage>116</lpage>. <pub-id pub-id-type="doi">10.1016/j.foodqual.2018.06.010</pub-id>
</citation>
</ref>
<ref id="B54">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2019b</year>). <article-title>On the relationship(s) between color and taste/flavor</article-title>. <source>Exp. Psychol.</source> <volume>66</volume>, <fpage>99</fpage>&#x2013;<lpage>111</lpage>. <pub-id pub-id-type="doi">10.1027/1618-3169/a000439</pub-id>
</citation>
</ref>
<ref id="B55">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Oral referral: On the mislocalization of odours to the mouth</article-title>. <source>Food Qual. Prefer.</source> <volume>50</volume>, <fpage>117</fpage>&#x2013;<lpage>128</lpage>. <pub-id pub-id-type="doi">10.1016/j.foodqual.2016.02.006</pub-id>
</citation>
</ref>
<ref id="B56">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Piqueras-Fiszman</surname>
<given-names>B.</given-names>
</name>
</person-group> (<year>2016</year>). &#x201c;<article-title>Food color and its impact on taste/flavor perception</article-title>,&#x201d; in <source>Multisensory flavor perception</source> (<publisher-loc>Amsterdam, Netherlands</publisher-loc>: <publisher-name>Elsevier</publisher-name>), <fpage>107</fpage>&#x2013;<lpage>132</lpage>.</citation>
</ref>
<ref id="B57">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Velasco</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Knoeferle</surname>
<given-names>K.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>A large sample study on the influence of the multisensory environment on the wine drinking experience</article-title>. <source>Flavour</source> <volume>3</volume>, <fpage>8</fpage>&#x2013;<lpage>12</lpage>. <pub-id pub-id-type="doi">10.1186/2044-7248-3-8</pub-id>
</citation>
</ref>
<ref id="B58">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Velasco</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>On the multiple effects of packaging colour on consumer behaviour and product experience in the &#x2018;food and beverage&#x2019; and &#x2018;home and personal care&#x2019; categories</article-title>. <source>Food Qual. Prefer.</source> <volume>68</volume>, <fpage>226</fpage>&#x2013;<lpage>237</lpage>. <pub-id pub-id-type="doi">10.1016/j.foodqual.2018.03.008</pub-id>
</citation>
</ref>
<ref id="B59">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Stein</surname>
<given-names>B. E.</given-names>
</name>
<name>
<surname>Meredith</surname>
<given-names>M. A.</given-names>
</name>
</person-group> (<year>1993</year>). <source>The merging of the senses</source>. <publisher-loc>Massachusetts, United States</publisher-loc>: <publisher-name>MIT Press</publisher-name>.</citation>
</ref>
<ref id="B60">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Stelick</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Penano</surname>
<given-names>A. G.</given-names>
</name>
<name>
<surname>Riak</surname>
<given-names>A. C.</given-names>
</name>
<name>
<surname>Dando</surname>
<given-names>R.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Dynamic context sensory testing&#x2013;a proof of concept study bringing virtual reality to the sensory booth</article-title>. <source>J. Food Sci.</source> <volume>83</volume>, <fpage>2047</fpage>&#x2013;<lpage>2051</lpage>. <pub-id pub-id-type="doi">10.1111/1750-3841.14275</pub-id>
</citation>
</ref>
<ref id="B61">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sugimori</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Kawasaki</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Cross-modal correspondence between visual information and taste perception of bitter foods and drinks</article-title>. <source>Food Qual. Prefer.</source> <volume>98</volume>, <fpage>104539</fpage>. <pub-id pub-id-type="doi">10.1016/j.foodqual.2022.104539</pub-id>
</citation>
</ref>
<ref id="B62">
<citation citation-type="web">
<collab>Unity</collab> (<year>2019</year>). <article-title>Global illumination</article-title>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://docs.unity.cn/2019.1/Documentation/Manual/GlobalIllumination.html">https://docs.unity.cn/2019.1/Documentation/Manual/GlobalIllumination.html</ext-link>
</comment> (<comment>Accessed November 24, 2022</comment>).</citation>
</ref>
<ref id="B63">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>von Castell</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Hecht</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Oberfeld</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Which attribute of ceiling color influences perceived room height?</article-title> <source>Hum. Factors</source> <volume>60</volume>, <fpage>1228</fpage>&#x2013;<lpage>1240</lpage>. <pub-id pub-id-type="doi">10.1177/0018720818789524</pub-id>
</citation>
</ref>
<ref id="B64">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wan</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Woods</surname>
<given-names>A. T.</given-names>
</name>
<name>
<surname>van den Bosch</surname>
<given-names>J. J.</given-names>
</name>
<name>
<surname>McKenzie</surname>
<given-names>K. J.</given-names>
</name>
<name>
<surname>Velasco</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Cross-cultural differences in crossmodal correspondences between basic tastes and visual features</article-title>. <source>Front. Psychol.</source> <volume>5</volume>, <fpage>1365</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyg.2014.01365</pub-id>
</citation>
</ref>
<ref id="B65">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname>
<given-names>Q. J.</given-names>
</name>
<name>
<surname>Barbosa Escobar</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Alves Da Mota</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Velasco</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Getting started with virtual reality for sensory and consumer science: Current practices and future perspectives</article-title>. <source>Food Res. Int.</source> <volume>145</volume>, <fpage>110410</fpage>. <pub-id pub-id-type="doi">10.1016/j.foodres.2021.110410</pub-id>
</citation>
</ref>
<ref id="B66">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname>
<given-names>Q. J.</given-names>
</name>
<name>
<surname>Meyer</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Waters</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Zendle</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>A dash of virtual milk: Altering product color in virtual reality influences flavor perception of cold-brew coffee</article-title>. <source>Front. Psychol.</source> <volume>11</volume>, <fpage>595788</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyg.2020.595788</pub-id>
</citation>
</ref>
<ref id="B67">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname>
<given-names>Q. J.</given-names>
</name>
<name>
<surname>Mielby</surname>
<given-names>L. A.</given-names>
</name>
<name>
<surname>Junge</surname>
<given-names>J. Y.</given-names>
</name>
<name>
<surname>Bertelsen</surname>
<given-names>A. S.</given-names>
</name>
<name>
<surname>Kidmose</surname>
<given-names>U.</given-names>
</name>
<name>
<surname>Spence</surname>
<given-names>C.</given-names>
</name>
<etal/>
</person-group> (<year>2019</year>). <article-title>The role of intrinsic and extrinsic sensory factors in sweetness perception of food and beverages: A review</article-title>. <source>Foods</source> <volume>8</volume>, <fpage>211</fpage>. <pub-id pub-id-type="doi">10.3390/foods8060211</pub-id>
</citation>
</ref>
<ref id="B68">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Wobbrock</surname>
<given-names>J. O.</given-names>
</name>
<name>
<surname>Findlater</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Gergle</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Higgins</surname>
<given-names>J. J.</given-names>
</name>
</person-group> (<year>2011</year>). &#x201c;<article-title>The aligned rank transform for nonparametric factorial analyses using only anova procedures</article-title>,&#x201d; in <conf-name>Proceedings of the SIGCHI Conference on Human Factors in Computing Systems</conf-name>, <conf-loc>Vancouver, BC, Canada</conf-loc>, <conf-date>07 May 2011</conf-date> (<publisher-name>Association for Computing Machinery</publisher-name>), <fpage>143</fpage>&#x2013;<lpage>146</lpage>. <comment>CHI &#x2019;11</comment>. <pub-id pub-id-type="doi">10.1145/1978942.1978963</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>