<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Archiving and Interchange DTD v2.3 20070202//EN" "archivearticle.dtd">
<?covid-19-tdm?>
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="methods-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Psychol.</journal-id>
<journal-title>Frontiers in Psychology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Psychol.</abbrev-journal-title>
<issn pub-type="epub">1664-1078</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpsyg.2022.954803</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Psychology</subject>
<subj-group>
<subject>Methods</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Psychological distance and user engagement in online exhibitions: Visualization of moir&#x00E9; patterns based on electroencephalography signals</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes"><name><surname>Li</surname><given-names>Jingjing</given-names></name>
<xref rid="aff1" ref-type="aff"><sup>1</sup></xref>
<xref rid="aff2" ref-type="aff"><sup>2</sup></xref>
<xref rid="c001" ref-type="corresp"><sup>&#x002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/1695895/overview"/>
</contrib>
<contrib contrib-type="author"><name><surname>Yang</surname><given-names>Ye</given-names></name>
<xref rid="aff3" ref-type="aff"><sup>3</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/1936157/overview"/>
</contrib>
<contrib contrib-type="author"><name><surname>Zhang</surname><given-names>Zhexin</given-names></name>
<xref rid="aff1" ref-type="aff"><sup>1</sup></xref>
<xref rid="aff2" ref-type="aff"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author"><name><surname>Yoshida</surname><given-names>Nozomu</given-names></name>
<xref rid="aff1" ref-type="aff"><sup>1</sup></xref>
<xref rid="aff2" ref-type="aff"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author"><name><surname>Xanat</surname><given-names>Vargas Meza</given-names></name>
<xref rid="aff2" ref-type="aff"><sup>2</sup></xref>
<xref rid="aff4" ref-type="aff"><sup>4</sup></xref>
</contrib>
<contrib contrib-type="author" corresp="yes"><name><surname>Ochiai</surname><given-names>Yoichi</given-names></name>
<xref rid="aff2" ref-type="aff"><sup>2</sup></xref>
<xref rid="aff4" ref-type="aff"><sup>4</sup></xref>
<xref rid="c002" ref-type="corresp"><sup>&#x002A;</sup></xref>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Graduate School of Comprehensive Human Sciences, University of Tsukuba</institution>, <addr-line>Tsukuba</addr-line>, <country>Japan</country></aff>
<aff id="aff2"><sup>2</sup><institution>R&#x0026;D Center for Digital Nature, University of Tsukuba</institution>, <addr-line>Tsukuba</addr-line>, <country>Japan</country></aff>
<aff id="aff3"><sup>3</sup><institution>College of Design and Innovation, Tongji University</institution>, <addr-line>Shanghai</addr-line>, <country>China</country></aff>
<aff id="aff4"><sup>4</sup><institution>Faculty of Library, Information and Media Science, University of Tsukuba</institution>, <addr-line>Tsukuba</addr-line>, <country>Japan</country></aff>
<author-notes>
<fn id="fn0001" fn-type="edited-by">
<p>Edited by: Anfan Chen, The Chinese University of Hong Kong, China</p>
</fn>
<fn id="fn0002" fn-type="edited-by">
<p>Reviewed by: Ruqin Ren, Shanghai Jiao Tong University, China; Wenwen Chang, Lanzhou Jiaotong University, China; Rongrong Fu, Yanshan University, China</p>
</fn>
<corresp id="c001">&#x002A;Correspondence: Jingjing Li, <email>li@digitalnature.slis.tsukuba.ac.jp</email></corresp>
<corresp id="c002">Yoichi Ochiai, <email>wizard@slis.tsukuba.ac.jp</email></corresp>
<fn id="fn0003" fn-type="other">
<p>This article was submitted to Health Psychology, a section of the journal Frontiers in Psychology</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>15</day>
<month>09</month>
<year>2022</year>
</pub-date>
<pub-date pub-type="collection">
<year>2022</year>
</pub-date>
<volume>13</volume>
<elocation-id>954803</elocation-id>
<history>
<date date-type="received">
<day>27</day>
<month>05</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>09</day>
<month>08</month>
<year>2022</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2022 Li, Yang, Zhang, Yoshida, Xanat and Ochiai.</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Li, Yang, Zhang, Yoshida, Xanat and Ochiai</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>The COVID-19 pandemic has significantly affected the exhibition of artworks in museums and galleries. Many have displayed their collections online. In this context, experiencing an online exhibition is essential for visitors to appreciate and understand the artwork. Compared with offline exhibitions, visitors to online exhibitions are often unable to communicate their experiences with other visitors. Therefore, in this study, by facilitating communication <italic>via</italic> Zoom call, we established a system that allows two people to visit the museum together through the Google Arts and Culture (GA&#x0026;C) website. To reduce the psychological distance between online visitors and help increase user engagement, we designed and developed a media device based on moir&#x00E9; pattern visualization of electroencephalography (EEG) signals. The participants were divided into two groups to visit the online museum, communicating remotely through Zoom. The objective of this study was to test whether a real-time EEG signal visualization device could help close the psychological distance between participants and whether it could increase user engagement with the online exhibition. Participants were randomly assigned to either the normal online exhibition experience (NOEE) group or the EEG signal visualization device (ESVD) group. Participants in the NOEE group experienced four online exhibitions (Task1, Task2, Task3, and Task4) together (two participants per test unit). The conditions for participants in the ESVD group remained the same, apart from adding a media device to enable them to visualize EEG signals. A total of 40 university students participated in this study. Independent samples <italic>t</italic>-tests revealed that participants in the ESVD group perceived a significantly closer psychological distance between themselves and the participants on the opposite side than those in the NOEE group (<italic>t</italic>&#x2009;=&#x2009;&#x2212;2.699; <italic>p</italic>&#x2009;=&#x2009;0.008&#x2009;&#x003C;&#x2009;0.05). A one-way ANOVA revealed that participants experienced Task3 with significantly closer psychological distance assessments than Task1 (<italic>p</italic>&#x2009;=&#x2009;0.002&#x2009;&#x003C;&#x2009;0.05), Task2 (<italic>p</italic>&#x2009;=&#x2009;0.000&#x2009;&#x003C;&#x2009;0.05), and Task4 (<italic>p</italic>&#x2009;=&#x2009;0.001&#x2009;&#x003C;&#x2009;0.05). Repeated-measures ANOVAs revealed that participants in the ESVD group had higher overall user engagement than those in the NOEE group, with marginal significance (<italic>p</italic>&#x2009;=&#x2009;0.056&#x2009;&#x003C;&#x2009;0.1). Thus, this study shows that EEG visualization media devices can reduce the psychological distance between two participants when experiencing an online exhibition. Moreover, such devices can increase user engagement to some extent.</p>
</abstract>
<kwd-group>
<kwd>COVID-19</kwd>
<kwd>online exhibition</kwd>
<kwd>psychological distance</kwd>
<kwd>electroencephalography (EEG)</kwd>
<kwd>user engagement (UE)</kwd>
<kwd>user experience (UX)</kwd>
<kwd>moir&#x00E9; patterns</kwd>
</kwd-group>
<contract-num rid="cn1">JPMJSP2124</contract-num>
<contract-num rid="cn2">JPMJCR1781</contract-num>
<contract-sponsor id="cn1">Japan Science and Technology Agency<named-content content-type="fundref-id">10.13039/501100002241</named-content>
</contract-sponsor>
<contract-sponsor id="cn2">Core Research for Evolutional Science and Technology<named-content content-type="fundref-id">10.13039/501100003382</named-content>
</contract-sponsor>
<counts>
<fig-count count="6"/>
<table-count count="8"/>
<equation-count count="0"/>
<ref-count count="76"/>
<page-count count="15"/>
<word-count count="9841"/>
</counts>
</article-meta>
</front>
<body>
<sec id="sec1" sec-type="intro">
<title>Introduction</title>
<p>COVID-19 has impacted many areas of our daily lives, including the exhibition of artworks. Most museums and galleries have restricted the number of visitors to prevent the spread of COVID-19 by ensuring social distancing. In public health, social distancing is also called physical distancing (<xref ref-type="bibr" rid="ref30">Hensley, 2020</xref>; <xref ref-type="bibr" rid="ref73">Venske, 2020</xref>). Physical distancing involves staying at least 6&#x2009;ft away from others to avoid contracting an airborne disease such as COVID-19. It is a set of non-pharmaceutical interventions or measures designed to prevent the spread of infectious diseases by maintaining physical distance among people and reducing the number of times people come into close contact (<xref ref-type="bibr" rid="ref55">Perra, 2021</xref>). Physical distance maps the psychological distance between individuals, reflecting the degree of intimacy in interpersonal relationships (<xref ref-type="bibr" rid="ref63">Shenghua, 1997</xref>, pp. 118&#x2013;119). Physical distance is even more important for museum and art gallery visitors, especially in terms of processing, experiencing, and appreciating art. Therefore, in pandemic situations, visitors are limited in viewing the exhibition, since it is difficult to convey ideas to others and listen to their thoughts and feelings.</p>
<p>In 1912, Bullough developed the concept of psychological distance in the field of aesthetics to illustrate that aesthetic feelings stem from the psychological distance that an observer perceives between themselves and the artwork (<xref ref-type="bibr" rid="ref17">Dhar and Wertenbroch, 2000</xref>; <xref ref-type="bibr" rid="ref12">Chen and Li, 2018</xref>). In light of this, in the pandemic and post-pandemic era, the categories of distances in Personal Space Theory proposed by <xref ref-type="bibr" rid="ref28">Hall (1959)</xref> were diminished, losing the &#x201C;intimate distance&#x201D; (0&#x2013;0.45&#x2009;m, 0&#x2013;1.5&#x2009;ft), &#x201C;personal distance&#x201D; (0.45&#x2013;1.2&#x2009;m, 1.5&#x2013;4&#x2009;ft), and part of the &#x201C;social distance&#x201D; (1.2&#x2013;3.6&#x2009;m, 4&#x2013;12&#x2009;ft) and &#x201C;public distance&#x201D; (3.6&#x2013;7.6&#x2009;m, 12&#x2013;25&#x2009;ft). Hence, the distances of Personal Space Theory have changed. On the other hand, and more importantly, online communication is becoming more common; for example, online classes (<xref ref-type="bibr" rid="ref57">Pokhrel and Chhetri, 2021</xref>), online work (<xref ref-type="bibr" rid="ref6">Blanchard, 2021</xref>), and online psychotherapy (<xref ref-type="bibr" rid="ref74">Weinberg, 2020</xref>); this shift has accelerated online communication and interaction, which has even become a necessity for many.</p>
<p>As a result of COVID-19, the trend of online communication has become more prominent; consequently, many museums and art galleries have provided online viewing experiences to visitors. The International Council of Museums (ICOM) reported that online activities would continue to increase, particularly by creating new digital communication channels in the wake of lockdowns. Additionally, an increasing number of museums and galleries are planning to add online exhibitions to their existing activities after the end of lockdowns (an increase of 5.6%), and more are planning to start online exhibitions (an increase of 6.5%; <xref ref-type="bibr" rid="ref33">International Council of Museums, 2021</xref>, pp. 15&#x2013;16). Digital communication activities increased by at least 15% in museums in Europe and Asia, while the <xref ref-type="bibr" rid="ref330">Network of European Museum Organisations (2020)</xref> reported that 58% of museums held digital activities (an increase of 37%) and 23% started new activities (<xref ref-type="bibr" rid="ref38">King et al., 2021</xref>). Thus, the trend of online exhibitions is in line with the broader trend of online activities. Moreover, online exhibitions are more inclusive, allowing access to people who normally do not have access to physical museums and galleries.</p>
<p>The Internet makes it possible to chat across time and space and seems to make the distance between people disappear. However, something inherently embedded in the virtual world distorts communication when using AI-facilitated chatting tools (<xref ref-type="bibr" rid="ref58">Radziwill and Benton, 2017</xref>). Overall, we face uncertain and confusing distances in both the physical and virtual worlds (<xref ref-type="bibr" rid="ref16">Dewey, 2018</xref>), bringing us into a new era of a &#x201C;distance crisis.&#x201D; Psychologically distant objects and events are not present in a direct experience of reality. In this sense, distance is not limited to the physical surroundings; it can also be abstract (<xref ref-type="bibr" rid="ref70">Trope and Liberman, 2010</xref>).</p>
<p>With the development of online exhibition technologies, digital devices, such as virtual reality (VR), 3D panoramic VR, and 3D web engines have been used to display artwork (<xref ref-type="bibr" rid="ref67">Styliani et al., 2009</xref>). Cell phones, computers, tablets, and interactive multimedia devices [e.g., VR/augmented reality (AR) devices], as information dissemination media and bearers of digital technology (<xref ref-type="bibr" rid="ref11">Chang et al., 2014</xref>; <xref ref-type="bibr" rid="ref75">Wu and Li, 2022</xref>), gradually change people&#x2019;s exhibition viewing modes, making their experiences more human and emotional. The Google Arts and Culture (GA&#x0026;C) website is one of the most representative online exhibition sites, featuring selected content from more than 2000 top museums, galleries, and archives (<xref ref-type="bibr" rid="ref26">Google Arts and Culture, 2018</xref>). Therefore, we selected four different interactive exhibitions from the GA&#x0026;C website as experiential tasks in our experiment. As visiting and appreciating artworks is also a process of mutual communication and exchange of feelings among visitors, online exhibitions are also increasingly enabling online multiplayer experiences to increase interaction and communication between users. For example, the GA&#x0026;C website features an interactive multiplayer game called &#x201C;Puzzle Party&#x201D; in which participants work together to put together scattered pieces of the puzzle based on reference images of the artwork (<xref ref-type="bibr" rid="ref27">Google Arts and Culture, 2020</xref>). However, only a few studies have focused on exploring the psychological distance between users in remote interactions.</p>
<p>Our study focuses on exploring a new way of interaction to decrease the psychological distance between participants in online exhibitions. Because people&#x2019;s physiological signal data are closely related to their emotional and physical states and reactions, we aim to express the &#x201C;distances&#x201D; between people by visualizing physiological signals. Among physiological signals, brainwave signals originate from voltage fluctuations caused by ionic currents within brain neurons and have been shown to represent macroscopic activities on the surface layers of the brain (<xref ref-type="bibr" rid="ref51">Niedermeyer and da Silva, 2005</xref>). Considering this, our study aimed to apply electroencephalography (EEG) signals to explore new ways of expressing distance. Currently, an increasing number of studies are incorporating user EEG signals, with relevant studies and applications in many fields, such as user experience analysis during human&#x2013;computer interaction (<xref ref-type="bibr" rid="ref41">Lee, 2004</xref>; <xref ref-type="bibr" rid="ref20">Frey et al., 2013</xref>; <xref ref-type="bibr" rid="ref45">Li et al., 2022b</xref>), driver fatigue detection (<xref ref-type="bibr" rid="ref35">Jap et al., 2009</xref>; <xref ref-type="bibr" rid="ref7">Borghini et al., 2014</xref>; <xref ref-type="bibr" rid="ref23">Gao et al., 2019</xref>), and emotion recognition (<xref ref-type="bibr" rid="ref1">Alarcao and Fonseca, 2017</xref>; <xref ref-type="bibr" rid="ref64">Shu et al., 2018</xref>). Additionally, more studies have used headset-based noninvasive brain&#x2013;computer interfaces (<xref ref-type="bibr" rid="ref15">Cincotti et al., 2008</xref>; <xref ref-type="bibr" rid="ref10">Cecotti, 2011</xref>), and these studies have demonstrated that EEG headset devices have become increasingly accurate (<xref ref-type="bibr" rid="ref49">Maskeliunas et al., 2016</xref>; <xref ref-type="bibr" rid="ref40">LaRocco et al., 2020</xref>; <xref ref-type="bibr" rid="ref22">Fu et al., 2022</xref>).</p>
<p>This novel form of distance expression needs to be visualized to raise people&#x2019;s awareness of &#x201C;distance.&#x201D; Visualizing various types of information has long been an effective way to transform abstract data and concepts into clearly understandable images. Many fields have applied EEG signal visualization, including affective visualization (<xref ref-type="bibr" rid="ref47">Liu et al., 2010</xref>; <xref ref-type="bibr" rid="ref21">Fu and Li, 2022</xref>), interactive art exhibitions (<xref ref-type="bibr" rid="ref13">Christopher et al., 2013</xref>, <xref ref-type="bibr" rid="ref14">2014</xref>), and gaming experiences (<xref ref-type="bibr" rid="ref37">Kerous et al., 2018</xref>), to enhance participants&#x2019; understanding and visual perception of their EEG signal changes. Therefore, distance visualization can enhance the perception of human interaction.</p>
<p>We designed a system in which two participants experienced four online exhibitions from the GA&#x0026;C website together. The system was as follows: the two participants were in the same room, and each participant was assigned a computer and a screen displaying the EEG signal visualization. As the participants were separated by the two screens, they could not see each other, thus simulating a real remote interaction. This study used a comparison experiment, in which the only independent variable was a media device that transformed the difference in EEG signals between the two participants into real-time moir&#x00E9; patterns. From the 17th to the 20th century, moir&#x00E9; patterns were gradually discovered and explored in mathematics, physics, and art (<xref ref-type="bibr" rid="ref34">Isaac, 2000</xref>). They are defined as interference images produced by two or more similar, fence-like overlapping stripes. Participants were divided into two groups, both of which communicated through Zoom with the camera turned on during the exhibition; one group was additionally exposed to the EEG signal visualization screens, one placed in front of each participant. During the intervention, these participants were able to view the changes in the moir&#x00E9; images displayed on the screens.</p>
<p>Our research questions are as follows:</p>
<list list-type="bullet">
<list-item>
<p>R.Q.1: Does visualization of EEG signal differences in the online exhibition experience help users reduce psychological distance?</p>
</list-item>
<list-item>
<p>R.Q.2: Can EEG visualization enhance user engagement in online environments?</p>
</list-item>
</list>
</sec>
<sec id="sec2">
<title>Materials and equipment</title>
<sec id="sec3">
<title>Participants</title>
<p>We considered a sample of university students. They were deemed suitable for testing whether the addition of an EEG visualization device would help reduce psychological distance and improve user engagement, given the operational complexity of online exhibitions and the need to evaluate engagement in online experiences. Before we recruited the participants, we designed our experiments in accordance with the World Medical Association&#x2019;s Declaration of Helsinki. The ethics review office of the Faculty of Library, Information and Media Science of the University of Tsukuba in Japan approved the study (permission number 22&#x2013;4).</p>
<p>In the notice for recruiting participants, we described the general content and flow of the experiment, explicitly stating that two people were required to participate in the experiment as a unit. To reduce the effect of differences in familiarity between the two participants in each experiment on psychological distance, we selected participants who had known each other for a long time (65% of participants were mutually acquainted for more than 1&#x2009;year) and were mainly from the same research laboratory. Before starting the experiment, we informed the participants that there was no compensation for participation. After the participants received a complete explanation of the study, they agreed to participate and subsequently signed a written informed consent form. Permission to publish any potentially identifiable images or data included in this study was obtained from the pertinent individuals.</p>
</sec>
<sec id="sec4">
<title>Materials</title>
<sec id="sec5">
<title>Experimental scenarios and equipment</title>
<p>The experiment location was a classroom with an area of 39&#x2009;m<sup>2</sup> equipped with Wi-Fi. The equipment used during the experiment included the following: six single light wood-colored desks, made of particleboard covered with a laminate; two Mac laptops displaying the online exhibition; two Apple earphones; two EPOC X headsets for capturing EEG signals; two laptops for recording EEG signal data; two 70-in screens displaying moir&#x00E9; patterns based on EEG signals; two iPads for answering questionnaires; and one DJI Pocket 2 video camera for recording the whole experiment.</p>
</sec>
<sec id="sec6">
<title>Google arts and culture</title>
<p>In this study, we classified online exhibition websites into four types based on the differences in the interaction methods between exhibits and users on the GA&#x0026;C website. The GA&#x0026;C Project was launched in 2011 in collaboration with 17 museums. The original 1,061 high-resolution images (created by 486 artists from different backgrounds) were displayed in 385 virtual exhibition rooms with 6,000 street-view-style panoramic images (<xref ref-type="bibr" rid="ref3">AtoZ Wiki, 2011</xref>; <xref ref-type="bibr" rid="ref36">Kennicott, 2011</xref>). With the aim of making culture more accessible, the project digitized millions of artifacts and made them available online, accessible to everyone (<xref ref-type="bibr" rid="ref36">Kennicott, 2011</xref>).</p>
<p>Google Arts and Culture has a wealth of content and features, including Virtual Museum Tour, Explore and Discover, Zoom Views, Create Your Own Collections, and educational content. The homepage of the GA&#x0026;C website is divided into different modules based on these contents and functions, including 2D images and information, 3D virtual space, game interaction, and video explanation. Based on the functions and modules mentioned above, we selected four corresponding exhibitions based on four different interaction types on the GA&#x0026;C website as tasks in this experiment. The four interaction types are as follows:</p>
<list list-type="bullet">
<list-item>
<p>Task1: 2D information kiosk; this describes to the visitor what the exhibit expresses, specifically its narrative logic, through pictures/text/diagrams, etc.</p>
</list-item>
<list-item>
<p>Task2: 3D virtual exhibition; this is a virtual recreation of physical three-dimensional (3D) exhibitions or museums that allow a visitor to navigate in a way that is closer to reality.</p>
</list-item>
<list-item>
<p>Task3: Interactive game; the user can complete game tasks (puzzles/coloring games/photography games) involving artworks in single-player or multiplayer mode.</p>
</list-item>
<list-item>
<p>Task4: Video instruction; this explains information related to the artwork through dynamic video (including motion graphics and sound effects).</p>
</list-item>
</list>
</sec>
<sec id="sec7">
<title>The EEG signal visualization device</title>
<p>The human brain contains neurons that communicate <italic>via</italic> electrical impulses. EEG signal measurement is a practical method for detecting sequential changes in brain activity without significant time delays. When we attempted to communicate the relationship between the two brainwave signals, we naturally associated them with moir&#x00E9; patterns. Brainwaves and moir&#x00E9; patterns have many common characteristics, not only in principle but also in their visual properties.</p>
<p>Illusory patterns resembling water ripples often appear when photographing a screen with a digital device. This type of pattern is called moir&#x00E9;; it is accidental, transient, and fluid, and while it is easy to ignore, it contains variable visual forms (<xref ref-type="bibr" rid="ref66">Spillmann, 1993</xref>).</p>
<p>We conducted two experiments (<xref ref-type="bibr" rid="ref43">Li et al., 2022a</xref>). The EEG signal visualization device detects the participants&#x2019; EEG signals, and we built a platform to calculate the difference between them in real time (see <xref rid="fig1" ref-type="fig">Figure 1</xref>). We applied moir&#x00E9; patterns to visualize EEG signal discrepancies to create the following analogy: when the brainwaves from the two participants (A and B) were in phase and became more similar, the generated moir&#x00E9; patterns had smaller sizes with shorter diameters, echoing constructive interference and shorter &#x201C;distances&#x201D; between the two participants, and vice versa (see <xref rid="fig2" ref-type="fig">Figure 2</xref>). The six scenario images in <xref rid="fig2" ref-type="fig">Figure 2</xref> represent images on the monitor at six different time points. The distance between the two endpoints of the graph on the screen indicated the numerical difference between participant A&#x2019;s and participant B&#x2019;s EEG signals and its real-time changes. Therefore, when the difference between the EEG signals of the two participants decreased, the moir&#x00E9; pattern became smaller.</p>
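<p>To make this mapping concrete, the following Python sketch shows one possible form of such a pipeline. It is a simplified illustration under stated assumptions (the sampling rate, frequency band, normalization, and scaling constants are hypothetical), not the exact code running on our platform: it computes the band-power difference between the two participants&#x2019; EEG windows and converts it into a scale factor for the moir&#x00E9; pattern, so that more similar signals yield a smaller pattern.</p>
<preformat preformat-type="code">
import numpy as np

FS = 128            # assumed sampling rate in Hz (the EPOC X can stream EEG at 128 Hz)
BAND = (8.0, 13.0)  # example frequency band of interest (alpha), chosen for illustration

def band_power(window, fs=FS, band=BAND):
    """Mean spectral power of one EEG window (1-D array) inside a frequency band."""
    freqs = np.fft.rfftfreq(len(window), d=1.0 / fs)
    psd = np.abs(np.fft.rfft(window)) ** 2
    lo, hi = np.searchsorted(freqs, band)  # indices bracketing the band
    return psd[lo:hi].mean()

def moire_scale(window_a, window_b, max_scale=1.0):
    """Map the band-power difference between two participants to a pattern scale:
    similar signals give a value near 0 (small pattern), dissimilar signals near max_scale."""
    pa, pb = band_power(window_a), band_power(window_b)
    diff = abs(pa - pb) / (pa + pb + 1e-9)  # normalized difference, roughly 0..1
    return max_scale * diff

# Example with two 1-s windows of synthetic, EEG-like noise
rng = np.random.default_rng(0)
a, b = rng.standard_normal(FS), rng.standard_normal(FS)
print(f"moire pattern scale: {moire_scale(a, b):.3f}")
</preformat>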
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption>
<p>Flowchart of the operation of the experimental platform.</p>
</caption>
<graphic xlink:href="fpsyg-13-954803-g001.tif"/>
</fig>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption>
<p>We transformed the difference between the electroencephalography (EEG) signals of participant A and participant B into the moir&#x00E9; pattern, and the pattern displayed six different states on the monitor (the images of the six states are six randomly selected time moments).</p>
</caption>
<graphic xlink:href="fpsyg-13-954803-g002.tif"/>
</fig>
</sec>
<sec id="sec8">
<title>Questionnaire 1: Psychological distance mapping diagram</title>
<p>We designed a picture of a male or female figure (non-binary participants could freely choose the image they identified with more closely) to help participants enhance their identification with the questionnaire (see <xref rid="fig3" ref-type="fig">Figures 3A</xref>,<xref rid="fig3" ref-type="fig">B</xref>). The concepts of distance and location were evoked by a paper questionnaire printed with an image that suggested the participants&#x2019; positional orientation and 3D space (<xref ref-type="bibr" rid="ref39">Kundr&#x00E1;t and Rojkov&#x00E1;, 2021</xref>). The image was printed on A4 paper, and the participants indicated placement by pasting pre-prepared cards (corresponding to the four tasks). The participants used different visual cues to identify which parts of the image were closer or farther away; one of these cues was a linear perspective convergence grid supported by texture-gradient coverage.</p>
<fig position="float" id="fig3">
<label>Figure 3</label>
<caption>
<p><bold>(A)</bold> Visual stimulus material for those who identified as males; <bold>(B)</bold> Visual stimulus material for those who identified as females; <bold>(C)</bold> Example of a worksheet completed with four tasks of the humanoid card, pasted by a participant who identified as male; <bold>(D)</bold> Example of a worksheet completed with four tasks of the humanoid card, pasted by a participant who identified as female; <bold>(E)</bold> Classification of personal space represented in visually stimulating materials; <bold>(F)</bold> Specific scores (from 1 to 25 scores) for each square represented in the visual stimulus material; <bold>(G)</bold> Example of a worksheet (completed by a participant who identified as male) that displayed scores when counting scores; and <bold>(H)</bold> Example of a worksheet (completed by a participant who identified as female) that displayed scores when counting scores.</p>
</caption>
<graphic xlink:href="fpsyg-13-954803-g003.tif"/>
</fig>
<p>The participants positioned the given cards on the pictures (the cards represented images of participants interacting together) according to the evoked notions of proximity and distance. After each completed online exhibition experience, they received neutral oral instructions to &#x201C;place this card on the picture depending on whether you feel close or far from the object of interaction&#x201D; (see <xref rid="fig3" ref-type="fig">Figures 3C</xref>,<xref rid="fig3" ref-type="fig">D</xref>).</p>
<p>According to distance theory, we divided the picture into four zones (<xref ref-type="bibr" rid="ref28">Hall, 1959</xref>): intimate distance (0&#x2013;0.45&#x2009;m, 0&#x2013;1.5&#x2009;ft), personal distance (0.45&#x2013;1.2&#x2009;m, 1.5&#x2013;4&#x2009;ft), social distance (1.2&#x2013;3.6&#x2009;m, 4&#x2013;12&#x2009;ft), and public distance (3.6&#x2013;7.6&#x2009;m, 12&#x2013;25&#x2009;ft; see <xref rid="fig3" ref-type="fig">Figure 3E</xref>). Additionally, we assigned a corresponding score to each grid square, from 1 to 25 points (see <xref rid="fig3" ref-type="fig">Figure 3F</xref>). The closer the placement, the higher the score, and vice versa (see <xref rid="fig3" ref-type="fig">Figures 3G</xref>,<xref rid="fig3" ref-type="fig">H</xref>).</p>
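<p>Conceptually, scoring a completed worksheet reduces to a lookup from the pasted card&#x2019;s grid cell to a point value and a distance zone. The Python sketch below illustrates this with an assumed 5&#x2009;&#x00D7;&#x2009;5 grid and an assumed row-to-zone assignment; the exact cell-to-score layout shown in <xref rid="fig3" ref-type="fig">Figures 3E</xref>,<xref rid="fig3" ref-type="fig">F</xref> may differ.</p>
<preformat preformat-type="code">
# Hypothetical scoring lookup for the psychological distance mapping diagram (PDMD).
# Row 0 is the row nearest the observer, row 4 the farthest, on an assumed 5 x 5 grid.
GRID_SIZE = 5
ZONES = ["intimate", "personal", "social", "social", "public"]  # assumed row-to-zone mapping

def pdmd_score(row, col, size=GRID_SIZE):
    """Return (score, zone) for a card pasted at (row, col).
    Nearer rows score higher; within a row, cells are numbered left to right."""
    if row not in range(size) or col not in range(size):
        raise ValueError("cell lies outside the grid")
    score = size * size - (row * size + col)  # 25 for the nearest-left cell, 1 for the farthest-right
    return score, ZONES[row]

# Example: a card placed in the second row from the observer, middle column
print(pdmd_score(1, 2))  # (18, 'personal')
</preformat>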
</sec>
<sec id="sec9">
<title>Questionnaire 2: User engagement scale-short form</title>
<p>The User Engagement Scale-SF (UES-SF) is a measure developed to evaluate user engagement and has been used in a variety of digital applications (<xref ref-type="bibr" rid="ref53">O&#x2019;Brien and Toms, 2013</xref>). It is designed to measure six attributes of user engagement: aesthetic appeal, focused attention, novelty, perceived usability, sensory engagement, and persistence. The UES-SF is a short form (SF) of the User Engagement Scale (UES), shortened from 31 to 12 items to reduce the response burden on users (<xref ref-type="bibr" rid="ref52">O&#x2019;Brien et al., 2018</xref>). It consists of the following components.</p>
<list list-type="bullet">
<list-item>
<p>FA: Focused attention, including three items.</p>
</list-item>
<list-item>
<p>PU: Perceived usability, including three items.</p>
</list-item>
<list-item>
<p>AE: Aesthetic appeal, including three items.</p>
</list-item>
<list-item>
<p>RW: Reward factor, a set of three items drawn from the EN (persistence), NO (novelty), and FI (sensory involvement) components of the full User Engagement Scale (a minimal scoring sketch is given after this list).</p>
</list-item>
</list>
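<p>As a rough illustration of how these subscales can be scored, the following Python sketch averages three hypothetical item responses per subscale and combines them into an overall engagement value. The 5-point response scale and the reverse-coding of the PU items are assumptions made for this example, not a restatement of the published scoring instructions.</p>
<preformat preformat-type="code">
import statistics

# Hypothetical UES-SF responses on an assumed 5-point Likert scale, three items per subscale.
responses = {
    "FA": [4, 3, 4],  # focused attention
    "PU": [2, 1, 2],  # perceived usability (raw values; negatively worded items in this sketch)
    "AE": [5, 4, 4],  # aesthetic appeal
    "RW": [4, 5, 4],  # reward factor (EN / NO / FI items)
}

def ues_sf_scores(resp, reverse_pu=True, scale_max=5):
    """Return per-subscale means plus an overall engagement score (mean of the subscales)."""
    items = {k: list(v) for k, v in resp.items()}
    if reverse_pu:  # assumption: the PU items are negatively worded and must be reverse-coded
        items["PU"] = [scale_max + 1 - v for v in items["PU"]]
    scores = {k: statistics.mean(v) for k, v in items.items()}
    scores["overall"] = statistics.mean(scores[k] for k in items)
    return scores

print(ues_sf_scores(responses))
</preformat>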
</sec>
</sec>
<sec id="sec10">
<title>Design</title>
<p>We used a between-subjects design with two conditions, and each participant was tested in only one of the conditions (see <xref rid="fig4" ref-type="fig">Figure 4</xref>). The participants were randomly assigned to two groups corresponding to the two conditions: the normal online exhibition experience group (NOEE group) and the group with an additional EEG signal visualization device (ESVD group). The difference between the two groups was that, in the ESVD group, we added the independent variable: a media device that translated the difference in EEG signals between the two participants into real-time moir&#x00E9; patterns. The total duration of the experiment was 5&#x2009;days (10:00&#x2013;18:00/day), with the NOEE group tested on Days 1&#x2013;2.5 and the ESVD group on Days 2.5&#x2013;5.</p>
<fig position="float" id="fig4">
<label>Figure 4</label>
<caption>
<p><bold>(A)</bold> The normal online exhibition experience (NOEE) group&#x2019;s overall experimental scene; <bold>(B)</bold> The experimental scene of a participant in the NOEE group; <bold>(C)</bold> The experimental scene of another participant in the NOEE group; <bold>(D)</bold> The EEG signal visualization device (ESVD) group&#x2019;s overall experimental scene; <bold>(E)</bold> The experimental scene of a participant in the ESVD group; and <bold>(F)</bold> The experimental scene of another participant in the ESVD group.</p>
</caption>
<graphic xlink:href="fpsyg-13-954803-g004.tif"/>
</fig>
</sec>
<sec id="sec11">
<title>Procedures</title>
<p>At the beginning of the experiment, the implementer explained the procedure to the participants and asked them to fill in their personal information. To capture the EEG signals in real time, the participants wore the EPOC X headset-based EEG equipment. Simultaneously, the experiment implementer used two other computers to record the captured data using EMOTIVPRO 3.0.</p>
<p>The participants first learned how to operate Tasks 1&#x2013;4 in the online exhibitions to familiarize themselves with the specific interaction processes, using the PC&#x2019;s touch area to control the interface. They then moved from one task to the next until all four tasks and questionnaires were completed within 1&#x2009;h (see <xref rid="fig5" ref-type="fig">Figure 5</xref>). The PC&#x2019;s built-in camera and Zoom were used to display images of the participants&#x2019; faces in real time during the experiment. Each PC was connected to an earphone that the participants used for communication. After visiting each exhibition, participants were required to answer a paper questionnaire (the psychological distance mapping diagram, PDMD) and an electronic questionnaire (UES-SF) on an iPad.</p>
<fig position="float" id="fig5">
<label>Figure 5</label>
<caption>
<p>Flow diagram of the experimental procedures.</p>
</caption>
<graphic xlink:href="fpsyg-13-954803-g005.tif"/>
</fig>
<p>In contrast to the NOEE group, in the ESVD group, the experimental implementer explained the EEG visualization media device to the participants before the start of the experiment. Additionally, this media device was used as a background screen (the background in front of both participants showed the same view) during the online museum experience, which participants could freely view at any time.</p>
<p>At the end of the experiment, we asked participants to write feedback on their overall experience in the relevant field of the questionnaire. The questionnaire headings were provided in three languages (English, Japanese, and Chinese), and the participants responded in their native language. The experiment implementer assisted the participants in this process by explaining the questionnaire titles using neutral vocabulary and did not guide the content of the participants&#x2019; responses.</p>
</sec>
<sec id="sec12">
<title>Statistical analysis</title>
<p>We used SPSS version 25.0 for statistical analysis of the data collected during the experiment. We used descriptive statistics for the sample and divided the experimental data into three main parts:</p>
<list list-type="bullet">
<list-item>
<p>Questionnaire data on the characteristics of the experimental participants, including gender, duration of mutual acquaintance, and most frequently used software for online meetings.</p>
</list-item>
<list-item>
<p>Questionnaire data on the perceived psychological distance of the participants during their experience with Tasks 1&#x2013;4.</p>
</list-item>
<list-item>
<p>Questionnaire data on the user engagement of the participants during their experience with Tasks 1&#x2013;4.</p>
</list-item>
</list>
<p>We utilized the following analysis methods:</p>
<list list-type="bullet">
<list-item>
<p>Experimental participants: We analyzed the differences in age between groups using independent samples <italic>t</italic>-tests. Differences in gender, region, duration of mutual acquaintance with the other participant, and the most frequently used online meeting software were also analyzed using <italic>&#x03C7;</italic><sup>2</sup> tests with SPSS version 25.0.</p>
</list-item>
<list-item>
<p>To solve R.Q.1: Regarding psychological distance during the interactions, we used one-way ANOVAs and an independent samples <italic>t</italic>-test to investigate the differences among the four tasks within each group and the differences between the groups.</p>
</list-item>
<list-item>
<p>To solve R.Q.2: Finally, the UES-SF questionnaires were analyzed with repeated-measures ANOVA to determine the differences in user engagement between the groups (a minimal analysis sketch is given after this list).</p>
</list-item>
</list>
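<p>For readers who prefer script-based analysis, the sketch below reproduces the same family of tests (independent samples <italic>t</italic>-test, one-way ANOVA, and <italic>&#x03C7;</italic><sup>2</sup> test) in Python on synthetic numbers shaped like our data; the analyses reported in this article were run in SPSS version 25.0, and the values produced by the sketch are illustrative only.</p>
<preformat preformat-type="code">
import numpy as np
from scipy import stats

rng = np.random.default_rng(42)

# Synthetic stand-ins shaped like the questionnaire data (NOT the study data):
# psychological distance scores for the 20 participants in each group.
noee = rng.normal(17.9, 4.4, 20)
esvd = rng.normal(19.6, 3.5, 20)

# Between-group comparison: independent samples t-test.
t_stat, p_val = stats.ttest_ind(noee, esvd)
print(f"t-test: t = {t_stat:.3f}, p = {p_val:.3f}")

# Within-group comparison across the four tasks: one-way ANOVA.
task_scores = [rng.normal(m, 4.0, 20) for m in (17.0, 16.4, 21.5, 16.7)]
f_stat, p_val = stats.f_oneway(*task_scores)
print(f"one-way ANOVA: F = {f_stat:.3f}, p = {p_val:.3f}")

# Group differences in categorical characteristics (e.g., gender counts): chi-squared test.
counts = np.array([[12, 8, 0],   # NOEE: male / female / non-binary
                   [8, 11, 1]])  # ESVD
chi2, p_val, dof, expected = stats.chi2_contingency(counts)
print(f"chi-squared: chi2 = {chi2:.3f}, df = {dof}, p = {p_val:.3f}")

# The repeated-measures ANOVA on the UES-SF scores could be run with, for example,
# statsmodels.stats.anova.AnovaRM on a long-format table of per-task scores.
</preformat>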
</sec>
</sec>
<sec id="sec13" sec-type="results">
<title>Results</title>
<sec id="sec14">
<title>Feasibility and participation</title>
<p>A total sample of 40 students from the University of Tsukuba participated in this study, including 20 males, 19 females, and one non-binary gender person. All participants were East Asian (specifically, Japanese and Chinese). The NOEE group included 20 participants (12 males and eight females) with a mean age of 24.9&#x2009;&#x00B1;&#x2009;1.5&#x2009;years. In the NOEE group, 14 participants had been mutually acquainted with their paired partner for more than 1&#x2009;year (additionally, two participants were mutually acquainted for more than 6&#x2009;months but less than 1&#x2009;year, two participants for more than 1&#x2009;month but less than 6&#x2009;months, and two for less than 1&#x2009;month). In the NOEE group, 12 participants used Zoom most frequently for online meetings, and eight used Microsoft Teams. The ESVD group consisted of 20 participants (eight males, 11 females, and one non-binary gender individual) with a mean age of 24.3&#x2009;&#x00B1;&#x2009;2.3&#x2009;years. In the ESVD group, 12 participants (comprising pairs of two people involved in the same experiment) were mutually acquainted for more than 1&#x2009;year (in addition, four participants were mutually acquainted for more than 6&#x2009;months but less than 1&#x2009;year, two participants for more than 1&#x2009;month but less than 6&#x2009;months, and two participants for less than 1&#x2009;month). In the ESVD group, 15 participants used Zoom most frequently for online meetings, and five used Microsoft Teams. There were no significant differences between the groups in terms of mean age (<italic>p</italic>&#x2009;=&#x2009;0.375), gender ratio (<italic>p</italic>&#x2009;=&#x2009;0.343), duration of acquaintance (<italic>p</italic>&#x2009;=&#x2009;0.946), or the most frequently used online meeting software (<italic>p</italic>&#x2009;=&#x2009;0.501). Further details are provided in <xref rid="tab1" ref-type="table">Table 1</xref>.</p>
<table-wrap position="float" id="tab1">
<label>Table 1</label>
<caption>
<p>Participant characteristics.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top" rowspan="2">Characteristics</th>
<th align="center" valign="top" rowspan="2">NOEE Group (<bold><italic>N</italic></bold>&#x2009;=&#x2009;20)</th>
<th align="center" valign="top" rowspan="2">ESVD Group (<bold><italic>N</italic></bold>&#x2009;=&#x2009;20)</th>
<th align="center" valign="top" colspan="2">Statistics</th>
</tr>
<tr>
<th align="center" valign="top">T or &#x03C7;<sup>2</sup></th>
<th align="center" valign="top"><italic>p</italic></th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top"><bold>Age mean (SD)</bold></td>
<td align="center" valign="top">24.9 (1.5)</td>
<td align="center" valign="top">24.3 (2.3)</td>
<td align="center" valign="top"><italic>T</italic> =&#x2009;0.897</td>
<td align="center" valign="top">0.375</td>
</tr>
<tr>
<td/>
<td align="center" valign="top"><bold><italic>N</italic></bold> <bold>(%)</bold></td>
<td align="center" valign="top"><bold><italic>N</italic></bold> <bold>(%)</bold></td>
<td/>
<td/>
</tr>
<tr>
<td align="left" valign="top"><bold>Gender</bold></td>
<td/>
<td/>
<td/>
<td/>
</tr>
<tr>
<td align="left" valign="top">Male</td>
<td align="center" valign="top">12 (60)</td>
<td align="center" valign="top">8 (40)</td>
<td align="center" valign="top" rowspan="3">&#x03C7;<sup>2</sup> =&#x2009;2.191</td>
<td align="center" valign="top" rowspan="3">0.343</td>
</tr>
<tr>
<td align="left" valign="top">Female</td>
<td align="center" valign="top">8 (40)</td>
<td align="center" valign="top">11 (55)</td>
</tr>
<tr>
<td align="left" valign="top">Non-binary gender</td>
<td align="center" valign="top">0 (0)</td>
<td align="center" valign="top">1 (5)</td>
</tr>
<tr>
<td align="left" valign="top"><bold>Duration of acquaintance</bold></td>
<td/>
<td/>
<td/>
<td/>
</tr>
<tr>
<td align="left" valign="top">&#x003E;1&#x2009;year</td>
<td align="center" valign="top">14 (70)</td>
<td align="center" valign="top">12 (60)</td>
<td align="center" valign="top" rowspan="4">&#x03C7;<sup>2</sup> =&#x2009;1.061</td>
<td align="center" valign="top" rowspan="4">0.946</td>
</tr>
<tr>
<td align="left" valign="top">0.5&#x2009;years to 1&#x2009;year</td>
<td align="center" valign="top">2 (10)</td>
<td align="center" valign="top">4 (20)</td>
</tr>
<tr>
<td align="left" valign="top">1&#x2009;month to 0.5&#x2009;years</td>
<td align="center" valign="top">2 (10)</td>
<td align="center" valign="top">2 (10)</td>
</tr>
<tr>
<td align="left" valign="top">&#x003C;1&#x2009;month</td>
<td align="center" valign="top">2 (10)</td>
<td align="center" valign="top">2 (10)</td>
</tr>
<tr>
<td align="left" valign="top"><bold>Online meeting software</bold></td>
<td/>
<td/>
<td/>
<td/>
</tr>
<tr>
<td align="left" valign="top">Zoom</td>
<td align="center" valign="top">12 (60)</td>
<td align="center" valign="top">15 (75)</td>
<td align="center" valign="top" rowspan="2">&#x03C7;<sup>2</sup> =&#x2009;1.026</td>
<td align="center" valign="top" rowspan="2">0.501</td>
</tr>
<tr>
<td align="left" valign="top">Teams</td>
<td align="center" valign="top">8 (40)</td>
<td align="center" valign="top">5 (25)</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p>NOEE group, normal online exhibition experience group; ESVD group, additional EEG signal visualization device group.</p>
</table-wrap-foot>
</table-wrap>
<p>All participants in the NOEE and ESVD groups completed the appropriate experimental processes without encountering any technical difficulties or significant disturbances that might have terminated the experience. We observed the participants&#x2019; behavior and determined that all participants were focused and motivated from the beginning to the completion of the trial.</p>
</sec>
<sec id="sec15">
<title>Evaluation of psychological distance</title>
<p>We divided the results of the participants&#x2019; psychological distance assessment into three parts:</p>
<list list-type="bullet">
<list-item>
<p>Whether there was a difference between the four tasks within the NOEE group</p>
</list-item>
<list-item>
<p>Whether there was a difference between the four tasks within the ESVD group</p>
</list-item>
<list-item>
<p>Whether there was a difference between the NOEE and ESVD groups</p>
</list-item>
</list>
<p>In the NOEE group, there were statistically significant differences among the four tasks within the group according to the results of the one-way ANOVA (<italic>F</italic> =&#x2009;7.473; <italic>p</italic> =&#x2009;0.000; see <xref rid="tab2" ref-type="table">Table 2</xref>). According to the results of multiple comparisons, the scores of Task3 were higher than those of the other three tasks, and there were significant differences between the scores of Task3 and Task1 (<italic>p</italic> =&#x2009;0.002), Task2 (<italic>p</italic> =&#x2009;0.000), and Task4 (<italic>p</italic> =&#x2009;0.001; <xref rid="tab3" ref-type="table">Table 3</xref>).</p>
<table-wrap position="float" id="tab2">
<label>Table 2</label>
<caption>
<p>One-way ANOVA within the NOEE group.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top"><bold>Task</bold></th>
<th align="center" valign="top"><bold><italic>N</italic></bold></th>
<th align="center" valign="top"><bold>Mean&#x2009;&#x00B1;&#x2009;SD</bold></th>
<th align="center" valign="top"><bold><italic>F</italic></bold></th>
<th align="center" valign="top"><bold><italic>p</italic></bold></th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Task1</td>
<td align="center" valign="top">20</td>
<td align="left" valign="top">17&#x2009;&#x00B1;&#x2009;4.46</td>
<td align="center" valign="top" rowspan="4">7.473</td>
<td align="center" valign="top" rowspan="4">0.000<xref rid="tfn1" ref-type="table-fn"><sup>&#x002A;</sup></xref></td>
</tr>
<tr>
<td align="left" valign="top">Task2</td>
<td align="center" valign="top">20</td>
<td align="left" valign="top">16.35&#x2009;&#x00B1;&#x2009;4.146</td>
</tr>
<tr>
<td align="left" valign="top">Task3</td>
<td align="center" valign="top">20</td>
<td align="left" valign="top">21.45&#x2009;&#x00B1;&#x2009;1.877</td>
</tr>
<tr>
<td align="left" valign="top">Task4</td>
<td align="center" valign="top">20</td>
<td align="left" valign="top">16.7&#x2009;&#x00B1;&#x2009;4.578</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="tfn1">
<label>&#x002A;</label>
<p>Significance at the 0.05 level.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<table-wrap position="float" id="tab3">
<label>Table 3</label>
<caption>
<p>Multiple comparisons between Task1/2/3/4 within the NOEE group.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top"><bold>(I) Task</bold></th>
<th align="left" valign="top"><bold>(J) Task</bold></th>
<th align="center" valign="top"><bold>MD (I-J)</bold></th>
<th align="center" valign="top"><bold><italic>SD</italic></bold></th>
<th align="center" valign="top"><bold><italic>p</italic></bold></th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Task1</td>
<td align="left" valign="top">Task2</td>
<td align="left" valign="top">0.65</td>
<td align="center" valign="top">1.362</td>
<td align="center" valign="top">0.997</td>
</tr>
<tr>
<td align="left" valign="top">Task1</td>
<td align="left" valign="top">Task3</td>
<td align="left" valign="top">&#x2212;4.45</td>
<td align="center" valign="top">1.082</td>
<td align="center" valign="top">0.002<xref rid="tfn2" ref-type="table-fn"><sup>&#x002A;</sup></xref></td>
</tr>
<tr>
<td align="left" valign="top">Task1</td>
<td align="left" valign="top">Task4</td>
<td align="left" valign="top">0.3</td>
<td align="center" valign="top">1.429</td>
<td align="center" valign="top">1</td>
</tr>
<tr>
<td align="left" valign="top">Task2</td>
<td align="left" valign="top">Task3</td>
<td align="left" valign="top">&#x2212;5.1</td>
<td align="center" valign="top">1.018</td>
<td align="center" valign="top">0.000<xref rid="tfn2" ref-type="table-fn"><sup>&#x002A;</sup></xref></td>
</tr>
<tr>
<td align="left" valign="top">Task2</td>
<td align="left" valign="top">Task4</td>
<td align="left" valign="top">&#x2212;0.35</td>
<td align="center" valign="top">1.381</td>
<td align="center" valign="top">1</td>
</tr>
<tr>
<td align="left" valign="top">Task3</td>
<td align="left" valign="top">Task4</td>
<td align="left" valign="top">4.75</td>
<td align="center" valign="top">1.106</td>
<td align="center" valign="top">0.001<xref rid="tfn2" ref-type="table-fn"><sup>&#x002A;</sup></xref></td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="tfn2">
<label>&#x002A;</label>
<p>Significance at the 0.05 level.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<p>In the ESVD group, there were significant differences between the four tasks within the group according to the results of the one-way ANOVA (<italic>F</italic> =&#x2009;4.510, <italic>p</italic> =&#x2009;0.006; see <xref rid="tab4" ref-type="table">Table 4</xref>). According to the results of multiple comparisons, the scores of Task3 were higher than those of the other three tasks, and there were significant differences between Task3 and Task1 (<italic>p</italic> =&#x2009;0.048), Task2 (<italic>p</italic> =&#x2009;0.001), and Task4 (<italic>p</italic> =&#x2009;0.021; <xref rid="tab5" ref-type="table">Table 5</xref>).</p>
<table-wrap position="float" id="tab4">
<label>Table 4</label>
<caption>
<p>One-way ANOVA within the ESVD group.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top"><bold>Task</bold></th>
<th align="center" valign="top"><bold><italic>N</italic></bold></th>
<th align="center" valign="top"><bold>Mean&#x2009;&#x00B1;&#x2009;<italic>SD</italic></bold></th>
<th align="center" valign="top"><bold><italic>F</italic></bold></th>
<th align="center" valign="top"><bold><italic>p</italic></bold></th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Task1</td>
<td align="center" valign="top">20</td>
<td align="left" valign="top">19.2&#x2009;&#x00B1;&#x2009;3.694</td>
<td align="center" valign="top" rowspan="4">4.510</td>
<td align="center" valign="top" rowspan="4">0.006<xref rid="tfn3" ref-type="table-fn"><sup>&#x002A;</sup></xref></td>
</tr>
<tr>
<td align="left" valign="top">Task2</td>
<td align="center" valign="top">20</td>
<td align="left" valign="top">18.55&#x2009;&#x00B1;&#x2009;2.964</td>
</tr>
<tr>
<td align="left" valign="top">Task3</td>
<td align="center" valign="top">20</td>
<td align="left" valign="top">21.85&#x2009;&#x00B1;&#x2009;1.981</td>
</tr>
<tr>
<td align="left" valign="top">Task4</td>
<td align="center" valign="top">20</td>
<td align="left" valign="top">18.65&#x2009;&#x00B1;&#x2009;4.043</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="tfn3">
<label>&#x002A;</label>
<p>Significance at the 0.05 level.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<table-wrap position="float" id="tab5">
<label>Table 5</label>
<caption>
<p>Multiple comparisons between Task1/2/3/4 within the ESVD group.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top"><bold>(I) Task</bold></th>
<th align="left" valign="top"><bold>(J) Task</bold></th>
<th align="center" valign="top"><bold>MD (I-J)</bold></th>
<th align="center" valign="top"><bold><italic>SD</italic></bold></th>
<th align="center" valign="top"><bold><italic>p</italic></bold></th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Task1</td>
<td align="left" valign="top">Task2</td>
<td align="left" valign="top">0.65</td>
<td align="center" valign="top">1.059</td>
<td align="left" valign="top">0.989</td>
</tr>
<tr>
<td align="left" valign="top">Task1</td>
<td align="left" valign="top">Task3</td>
<td align="left" valign="top">&#x2212;2.65</td>
<td align="center" valign="top">0.937</td>
<td align="left" valign="top">0.048<xref rid="tfn4" ref-type="table-fn"><sup>&#x002A;</sup></xref></td>
</tr>
<tr>
<td align="left" valign="top">Task1</td>
<td align="left" valign="top">Task4</td>
<td align="left" valign="top">0.55</td>
<td align="center" valign="top">1.224</td>
<td align="left" valign="top">0.998</td>
</tr>
<tr>
<td align="left" valign="top">Task2</td>
<td align="left" valign="top">Task3</td>
<td align="left" valign="top">&#x2212;3.3</td>
<td align="center" valign="top">0.797</td>
<td align="left" valign="top">0.001<xref rid="tfn4" ref-type="table-fn"><sup>&#x002A;</sup></xref></td>
</tr>
<tr>
<td align="left" valign="top">Task2</td>
<td align="left" valign="top">Task4</td>
<td align="left" valign="top">&#x2212;0.1</td>
<td align="center" valign="top">1.121</td>
<td align="center" valign="top">1</td>
</tr>
<tr>
<td align="left" valign="top">Task3</td>
<td align="left" valign="top">Task4</td>
<td align="left" valign="top">3.2</td>
<td align="center" valign="top">1.007</td>
<td align="left" valign="top">0.021<xref rid="tfn4" ref-type="table-fn"><sup>&#x002A;</sup></xref></td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="tfn4">
<label>&#x002A;</label>
<p>Significance at the 0.05 level.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<p>According to the results of statistical tests (independent samples <italic>t</italic>-tests) between the four tasks in the NOEE and ESVD groups, there were no significant differences between Task1 in the NOEE group and Task1 in the ESVD group (<italic>p</italic>&#x2009;=&#x2009;0.098), between Task2 in the NOEE group and Task2 in the ESVD group (<italic>p</italic>&#x2009;=&#x2009;0.061), between Task3 in the NOEE group and Task3 in the ESVD group (<italic>p</italic>&#x2009;=&#x2009;0.516), and between Task4 in the NOEE group and Task4 in the ESVD group (<italic>p</italic>&#x2009;=&#x2009;0.162). After analyzing the two groups&#x2019; overall task (OT) psychological distance scores, the mean&#x2009;&#x00B1;&#x2009;SD was 17.88&#x2009;&#x00B1;&#x2009;4.379 for the NOEE group and 19.56&#x2009;&#x00B1;&#x2009;3.478 for the ESVD group. Furthermore, a statistical test (independent samples <italic>t</italic>-test) for these two groups yielded <italic>t</italic>&#x2009;=&#x2009;&#x2212;2.699 and <italic>p</italic>&#x2009;=&#x2009;0.008 (&#x003C;&#x2009;0.05). This result indicated a statistically significant difference between the NOEE and ESVD groups (<xref rid="tab6" ref-type="table">Table 6</xref>), with the ESVD group scoring higher than the NOEE group.</p>
<table-wrap position="float" id="tab6">
<label>Table 6</label>
<caption>
<p>Independent sample <italic>t</italic>-test for Task1/2/3/4 and overall Task between NOEE and ESVD groups.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top"><bold>Task and OT</bold></th>
<th align="center" valign="top"><bold>NOEE Group Mean&#x2009;&#x00B1;&#x2009;SD</bold></th>
<th align="center" valign="top"><bold>ESVD Group Mean&#x2009;&#x00B1;&#x2009;SD</bold></th>
<th align="center" valign="top"><bold>T</bold></th>
<th align="center" valign="top"><bold>p</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Task1</td>
<td align="left" valign="top">17&#x2009;&#x00B1;&#x2009;4.46</td>
<td align="left" valign="top">19.2&#x2009;&#x00B1;&#x2009;3.694</td>
<td align="left" valign="top">&#x2212;1.699</td>
<td align="left" valign="top">0.098</td>
</tr>
<tr>
<td align="left" valign="top">Task2</td>
<td align="left" valign="top">16.35&#x2009;&#x00B1;&#x2009;4.146</td>
<td align="left" valign="top">18.55&#x2009;&#x00B1;&#x2009;2.964</td>
<td align="left" valign="top">&#x2212;1.931</td>
<td align="left" valign="top">0.061</td>
</tr>
<tr>
<td align="left" valign="top">Task3</td>
<td align="left" valign="top">21.45&#x2009;&#x00B1;&#x2009;1.877</td>
<td align="left" valign="top">21.85&#x2009;&#x00B1;&#x2009;1.981</td>
<td align="left" valign="top">&#x2212;0.656</td>
<td align="left" valign="top">0.516</td>
</tr>
<tr>
<td align="left" valign="top">Task4</td>
<td align="left" valign="top">16.7&#x2009;&#x00B1;&#x2009;4.578</td>
<td align="left" valign="top">18.65&#x2009;&#x00B1;&#x2009;4.043</td>
<td align="left" valign="top">&#x2212;1.428</td>
<td align="left" valign="top">0.162</td>
</tr>
<tr>
<td align="left" valign="top">OT</td>
<td align="left" valign="top">17.88&#x2009;&#x00B1;&#x2009;4.379</td>
<td align="left" valign="top">19.56&#x2009;&#x00B1;&#x2009;3.478</td>
<td align="left" valign="top">&#x2212;2.699</td>
<td align="left" valign="top">0.008<xref rid="tfn5" ref-type="table-fn"><sup>&#x002A;</sup></xref></td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p>NOEE group, online exhibition experience group; ESVD group, additional EEG signal visualization device group; OT, overall task.</p>
<fn id="tfn5">
<label>&#x002A;</label>
<p>Significance at the 0.05 level.</p>
</fn>
</table-wrap-foot>
</table-wrap>
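<p>The group comparison reported above can be reproduced with standard statistical software. The following minimal sketch is illustrative rather than the authors&#x2019; original analysis script; it assumes per-participant overall task scores stored in a hypothetical CSV file with &#x201C;group&#x201D; and &#x201C;score&#x201D; columns and uses SciPy&#x2019;s independent samples <italic>t</italic>-test.</p>
<preformat>
# Minimal sketch (illustrative; file and column names are assumptions):
# independent samples t-test between the NOEE and ESVD groups.
import pandas as pd
from scipy import stats

df = pd.read_csv("overall_task_scores.csv")   # columns: group, score
noee = df.loc[df["group"] == "NOEE", "score"]
esvd = df.loc[df["group"] == "ESVD", "score"]

t_stat, p_value = stats.ttest_ind(noee, esvd)  # two-sided, equal variances
print(f"t = {t_stat:.3f}, p = {p_value:.3f}")
</preformat>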
</sec>
<sec id="sec16">
<title>Evaluation of user engagement</title>
<p>We evaluated user engagement in the NOEE and ESVD groups, first by detecting within-subject effects for different indicators in the two groups, followed by comparing the two groups in pairs.</p>
<p><xref rid="tab7" ref-type="table">Table 7</xref> illustrates the repeated-measures ANOVA for all participants&#x2019; scores to compare the differences between the four tasks. The four tasks differed significantly in the PU indicator (<italic>F</italic>&#x2009;=&#x2009;4.559, <italic>p</italic>&#x2009;=&#x2009;0.034), but not in the FA (<italic>F</italic>&#x2009;=&#x2009;0.504, <italic>p</italic>&#x2009;=&#x2009;0.479), AE (<italic>F</italic>&#x2009;=&#x2009;1.968, <italic>p</italic>&#x2009;=&#x2009;0.163), and RW (<italic>F</italic>&#x2009;=&#x2009;2.587, <italic>p</italic>&#x2009;=&#x2009;0.11). <xref rid="tab8" ref-type="table">Table 8</xref> presents the paired comparisons of the overall engagement values for the two groups (NOEE and ESVD). Paired comparisons showed that participants&#x2019; overall engagement was marginally significantly higher in the ESVD group than in the NOEE group (<italic>p</italic>&#x2009;=&#x2009;0.056; 0.05&#x2009;&#x003C;&#x2009;<italic>p</italic>&#x2009;&#x003C;&#x2009;0.01).</p>
<table-wrap position="float" id="tab7">
<label>Table 7</label>
<caption>
<p>Within-subjects effect test for different indicators under the NOEE and ESVD groups.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top"><bold>UES-SF</bold></th>
<th align="center" valign="top"><bold>SS</bold></th>
<th align="center" valign="top"><bold>df</bold></th>
<th align="center" valign="top"><bold>MS</bold></th>
<th align="center" valign="top"><bold><italic>F</italic></bold></th>
<th align="center" valign="top"><bold><italic>p</italic></bold></th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Focused attention (FA)</td>
<td align="center" valign="top">1.008</td>
<td align="center" valign="top">1</td>
<td align="center" valign="top">1.008</td>
<td align="center" valign="top">0.504</td>
<td align="left" valign="top">0.479</td>
</tr>
<tr>
<td align="left" valign="top">Perceived usability (PU)</td>
<td align="center" valign="top">9.633</td>
<td align="center" valign="top">1</td>
<td align="center" valign="top">9.633</td>
<td align="center" valign="top">4.559</td>
<td align="left" valign="top">0.034<xref rid="tfn6" ref-type="table-fn"><sup>&#x002A;</sup></xref></td>
</tr>
<tr>
<td align="left" valign="top">Aesthetic appeal (AE)</td>
<td align="center" valign="top">3.169</td>
<td align="center" valign="top">1</td>
<td align="center" valign="top">3.169</td>
<td align="center" valign="top">1.968</td>
<td align="left" valign="top">0.163</td>
</tr>
<tr>
<td align="left" valign="top">Reward factor (RW)</td>
<td align="center" valign="top">3.502</td>
<td align="center" valign="top">1</td>
<td align="center" valign="top">3.502</td>
<td align="center" valign="top">2.587</td>
<td align="left" valign="top">0.11</td>
</tr>
<tr>
<td align="left" valign="top">Overall engagement (OE)</td>
<td align="center" valign="top">3.763</td>
<td align="center" valign="top">1</td>
<td align="center" valign="top">3.763</td>
<td align="center" valign="top">3.715</td>
<td align="left" valign="top">0.056</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="tfn6">
<label>&#x002A;</label>
<p>Significance at the 0.05 level.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<table-wrap position="float" id="tab8">
<label>Table 8</label>
<caption>
<p>Paired comparisons for the NOEE and ESVD groups.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top"><bold>(I) Group</bold></th>
<th align="left" valign="top"><bold>(J) Group</bold></th>
<th align="center" valign="top"><bold>MD (I-J)</bold></th>
<th align="center" valign="top"><bold><italic>SD</italic></bold></th>
<th align="center" valign="top"><bold><italic>p</italic></bold></th>
</tr>
</thead>
<tbody>
<tr>
<td>NOEE Group</td>
<td>ESVD Group</td>
<td>0.177</td>
<td>0.092</td>
<td>0.056</td>
</tr>
</tbody>
</table>
</table-wrap>
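<p>The within-subjects analysis in <xref rid="tab7" ref-type="table">Table 7</xref> and the paired comparison in <xref rid="tab8" ref-type="table">Table 8</xref> can be sketched as follows. This is a minimal illustration, assuming a long-format table in which each participant contributes an overall engagement score under both conditions, as implied by the within-subjects effect test reported above; the file and column names are assumptions, and statsmodels is used for the repeated-measures ANOVA.</p>
<preformat>
# Minimal sketch (illustrative; file and column names are assumptions):
# repeated-measures ANOVA on overall engagement across the two conditions.
import pandas as pd
from statsmodels.stats.anova import AnovaRM

data = pd.read_csv("ues_sf_long.csv")   # columns: subject, condition, oe
result = AnovaRM(data, depvar="oe", subject="subject",
                 within=["condition"]).fit()
print(result)  # F value, degrees of freedom, and p value for the condition effect
</preformat>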
</sec>
</sec>
<sec id="sec17" sec-type="discussions">
<title>Discussion</title>
<p>The experimental results showed that adding the visualization of participants&#x2019; EEG signals to the GA&#x0026;C website experience could reduce perceived psychological distance to some extent. In comparing user engagement on the GA&#x0026;C website, we found that the ESVD group, that is, the group using the additional EEG signal visualization device, showed slightly higher user engagement. Although psychological distance is a widely used theoretical construct in the literature, there is little clarity on what makes things appear to have greater or lesser psychological distance. Our study found that EEG signal visualization reduces the perceived psychological distance to some extent and enhances user engagement.</p>
<sec id="sec18">
<title>Social distance and technology</title>
<p>There are various ways of experiencing online exhibitions, including cell phones, tablets, VR devices, and computer screen displays (<xref ref-type="bibr" rid="ref9">Broeck et al., 2017</xref>). Different experience methods affect users&#x2019; psychological distance to different degrees. During the COVID-19 pandemic, emerging technologies such as tracking apps, AI, big data, 5G, drones, and robotics have been used in China and Japan (<xref ref-type="bibr" rid="ref62">Shaw et al., 2020</xref>). The high penetration rate of the Internet and smartphones, combined with the acceptance of new technologies, particularly among urban youth in these countries, may have contributed to the positive perception of the moir&#x00E9; patterns generated in this study.</p>
<p>By comparing the four exhibitions within each group, we found that different interaction methods had different effects on the psychological distance between users. In Task3, two participants were required to work together to complete a pair of puzzles. In this task, unlike the other tasks, they could see the trajectory of each other&#x2019;s operations in real time, and the displayed operation interface was synchronized. The synchronized interface provided an additional way for participants to understand each other&#x2019;s emotions. The statistical results showed that, in both the NOEE and ESVD groups, users&#x2019; psychological distance was closest during Task3 (the interactive puzzle game) compared with the other tasks (see the section &#x201C;Evaluation of Psychological Distance&#x201D; for the within-group comparison results). Additionally, the participants had the highest number of conversations during Task3, and more than half of them expressed a desire to continue experiencing it.</p>
<p>Synchronization is considered a joint change in psychophysiological signals when people interact intensively with each other (based on <xref ref-type="bibr" rid="ref18">Ekman et al., 2012</xref>). Such phenomena while playing games have proven to be psychologically and physically beneficial (e.g., <xref ref-type="bibr" rid="ref50">Motataianu, 2015</xref>; <xref ref-type="bibr" rid="ref59">Robinson et al., 2020</xref>). Synchronization can increase socialization capabilities through shared interactions and online experiences (e.g., <xref ref-type="bibr" rid="ref60">Sadhukhan et al., 2021</xref>). Given that art as a shared experience can also foster emotional contagion in viewers (<xref ref-type="bibr" rid="ref24">Gernot et al., 2018</xref>), combining online galleries with visualizations of psychophysiological signals (in this case, EEG) may have reinforced empathy among our participants. This could also have influenced the reduction in psychological distance.</p>
<p>The intensity of user engagement may influence users&#x2019; perceived psychological distance to some extent. Higher user engagement in the ESVD group likely reduced the perceived psychological distance. <xref ref-type="bibr" rid="ref72">Van Boven et al. (2010)</xref> showed that a reduction in objective distance and emotional intensity reduces perceived psychological distance. Emotional intensity also affects user engagement, as a good emotional design induces a sense of pleasure and security in the user, thus promoting user engagement during the experience (<xref ref-type="bibr" rid="ref42">Lee et al., 2002</xref>).</p>
</sec>
<sec id="sec19">
<title>Social distance pre- and during COVID-19</title>
<p>Some studies describe psychological distance as the degree of divergence from the direct experience of the self, here and now, along temporal, spatial, and social viewpoints or theoretical perspectives (<xref ref-type="bibr" rid="ref70">Trope and Liberman, 2010</xref>). It is accordingly divided into temporal, spatial, social, and hypothetical dimensions (<xref ref-type="bibr" rid="ref46">Liberman and Trope, 2014</xref>). <xref ref-type="bibr" rid="ref61">Scerrati et al. (2022)</xref> demonstrated that the pandemic likely drives individual differences in the assessment of social proximity. Our study focuses on the &#x201C;social distance&#x201D; dimension, that is, the distance between oneself and others.</p>
<p>Given that our participants were East Asians, we must discuss perceptions of social distance in these regions. In China, one regional factor influencing social distance is <italic>guanxi</italic>, which usually applies to business relationships, where greater trust corresponds to shorter social distance (<xref ref-type="bibr" rid="ref65">Song et al., 2012</xref>). There is also an indication that social distance is slightly greater in urban settings (<xref ref-type="bibr" rid="ref48">Ma et al., 2015</xref>). As for minorities, while the Chinese perceive some regional and international minorities as close, others are perceived as distant (<xref ref-type="bibr" rid="ref19">Fong and Spickard, 1994</xref>).</p>
<p>Among minorities in Japan, indigenous people were considered closer than people from mixed nationality backgrounds (<xref ref-type="bibr" rid="ref5">Ball, 2009</xref>). In both countries, social distance was greater when interacting with people with mental disabilities (<xref ref-type="bibr" rid="ref29">Haraguchi et al., 2009</xref>; <xref ref-type="bibr" rid="ref2">Ando et al., 2013</xref>), although physical contact was more frequent among Chinese nationals.</p>
<p>There was an indication of social isolation and depression among Japanese youth before the COVID-19 pandemic (<xref ref-type="bibr" rid="ref69">Takagi et al., 2013</xref>), and such phenomena increased during the pandemic, as did the suicide rate, particularly among women (<xref ref-type="bibr" rid="ref68">Sugaya et al., 2020</xref>; <xref ref-type="bibr" rid="ref54">Osaki et al., 2021</xref>). In China, poor mental health is associated with social distancing (<xref ref-type="bibr" rid="ref25">Goodwin et al., 2021</xref>). The existing evidence suggests that such effects can be mitigated by family support (<xref ref-type="bibr" rid="ref44">Li and Xu, 2022</xref>).</p>
<p>Social distancing is usually discussed in medical terms in the literature. In contrast, our study addresses social distancing in the context of university students&#x2019; opportunities to experience art. Many of these students move out of their homes to pursue their education, and international students additionally face the challenge of adapting to a different country amid a global pandemic. These factors can exacerbate isolation. Thus, we propose an alternative way of decreasing the perception of social distance, with the potential to mitigate isolation among East Asian students, which could aid in addressing other mental and physical health issues.</p>
<p>We discovered in the feedback that the participants in the ESVD group were more concerned about the partners with whom they experienced the online exhibition than were those in the NOEE group. We also collected textual feedback from participants in the study. <xref rid="fig6" ref-type="fig">Figure 6</xref> compares the keywords that appeared in the final feedback messages, and their numbers of occurrences, between the participants in the NOEE and ESVD groups. The percentage in the bottom-right corner of each keyword represents the percentage of participants who mentioned that keyword; the larger the value, the larger the corresponding area in the figure. Overall, 36 of the 40 participants (18 in each group) submitted feedback. In the NOEE group, the most frequent word was &#x201C;online,&#x201D; mentioned by five of the 18 participants (28%); in the ESVD group, it was &#x201C;feeling,&#x201D; mentioned by seven of the 18 participants (39%). The comparison between the two groups revealed that the words mentioned by the participants in the ESVD group were more often related to the partners who participated in the experiment, such as &#x201C;Close to&#x201D; (17%), &#x201C;Conversation&#x201D; (17%), &#x201C;Group work&#x201D; (11%), &#x201C;Partner&#x201D; (11%), and &#x201C;Distance&#x201D; (11%) (see <xref rid="fig6" ref-type="fig">Figure 6A</xref>), whereas the words mentioned by the participants in the NOEE group were more related to the online experience itself, such as &#x201C;Museum&#x201D; (22%), &#x201C;Mouse&#x201D; (22%), &#x201C;Experience&#x201D; (17%), &#x201C;Impression&#x201D; (17%), and &#x201C;Explain&#x201D; (17%) (see <xref rid="fig6" ref-type="fig">Figure 6B</xref>).</p>
<fig position="float" id="fig6">
<label>Figure 6</label>
<caption>
<p><bold>(A)</bold> High-frequency words that appeared in participants&#x2019; descriptions of the overall experience in the NOEE group (including percentages); <bold>(B)</bold> High-frequency words that appeared in participants&#x2019; descriptions of the overall experience in the ESVD group (including percentages).</p>
</caption>
<graphic xlink:href="fpsyg-13-954803-g006.tif"/>
</fig>
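<p>The keyword percentages in <xref rid="fig6" ref-type="fig">Figure 6</xref> reflect, for each group, the share of participants whose feedback mentioned a given word. A minimal sketch of this counting step is shown below; the feedback strings and keyword list are hypothetical placeholders rather than the actual data.</p>
<preformat>
# Minimal sketch (illustrative; feedback texts and keywords are placeholders):
# count how many participants mentioned each keyword and convert to percentages.
from collections import Counter

feedback = [
    "the online museum experience felt close to a real visit",
    "group work with my partner made the distance feel smaller",
]  # one entry per participant
keywords = ["online", "feeling", "partner", "distance", "group work"]

mentions = Counter()
for text in feedback:
    lowered = text.lower()
    for kw in keywords:
        if kw in lowered:
            mentions[kw] += 1  # each participant counted at most once per keyword

for kw, n in mentions.most_common():
    print(f"{kw}: {n}/{len(feedback)} participants ({100 * n / len(feedback):.0f}%)")
</preformat>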
</sec>
<sec id="sec20">
<title>Study limitations</title>
<p>Our study has several limitations. First, the sample size was relatively small. A larger sample is necessary to provide more meaningful data for evaluating the efficacy of using an EEG signal visualization system in the user experience of an online exhibition. The results of this study might also have differed if the sample had included older adults or people from nonurban settings.</p>
<p>Second, only a single mode of experience was examined. In this study, we used the GA&#x0026;C website as a case study. The GA&#x0026;C website is mainly experienced through computers, which most users employ more frequently in daily life than VR and AR devices (<xref ref-type="bibr" rid="ref71">Udell, 2019</xref>). With the evolution of virtual exhibitions, experience modalities have become more diverse. Using technologies such as Web/X3D, VR, and AR, visitors are offered the possibility of exploring virtual museums, interacting with virtual exhibits in real time, and visualizing these exhibits in contexts such as 3D gallery spaces (<xref ref-type="bibr" rid="ref56">Petridis et al., 2005</xref>; <xref ref-type="bibr" rid="ref67">Styliani et al., 2009</xref>). The web-based online museum is one of the earliest developed and most popular ways to experience virtual museums; therefore, we chose this form for participants in this study. Visiting virtual museums through different modes of experience may produce different effects. In the future, we will extend this study to explore the impact of different experiential modes on participants&#x2019; psychological distance and engagement.</p>
<p>Additionally, we observed that participants rarely looked at the EEG visualization images while focusing on the online exhibition, possibly because the online experience and the EEG visualization were displayed on separate screens. In this setup, participants needed to raise their heads to move their eyes from the computer screen to the monitor. Because they were fully engaged in viewing the artwork and reading the accompanying information, their eyes remained mainly on the computer screen, and they spent less time observing the screen displaying the EEG signals. If the EEG signal were displayed on the same screen as the artwork, participants would likely spend more time attending to changes in the EEG signal, which might affect the experimental results.</p>
</sec>
</sec>
<sec id="sec21" sec-type="conclusions">
<title>Conclusion</title>
<p>The COVID-19 pandemic has created the need to maintain social distancing in public places; thus, the distance between people has been affected. Work (<xref ref-type="bibr" rid="ref31">Hodder, 2020</xref>), study (<xref ref-type="bibr" rid="ref4">Azor&#x00ED;n, 2020</xref>; <xref ref-type="bibr" rid="ref8">Bozkurt et al., 2020</xref>), exhibition viewing (<xref ref-type="bibr" rid="ref32">Hoffman, 2020</xref>), and other activities have been compulsorily shifted online to reduce face-to-face contact. The distance between people has changed, becoming less perceptible and more complex. The development of remote services may change the way we perceive psychological distance. Our study reflects the &#x201C;social distance&#x201D; between people in post-pandemic situations. We also explore how the visualization of EEG signal differences in a virtual space can help reduce the psychological distance between users.</p>
<p>In this study, we developed and provided an EEG signal visualization system that offers a new way for users to experience online exhibitions and understand each other&#x2019;s emotions. Users employing the system in different spaces can perceive the differences in each other&#x2019;s EEG signals through visualized images. We used printed pictures during the experiment to evoke the feeling of &#x201C;psychological distance&#x201D; between participants and others (<xref ref-type="bibr" rid="ref39">Kundr&#x00E1;t and Rojkov&#x00E1;, 2021</xref>). Within-group and between-group analyses were conducted for both groups using independent sample <italic>t</italic>-tests and one-way ANOVA. User engagement was also investigated using the UES-SF questionnaire, and repeated-measures ANOVA was used to compare the differences between the two groups. We summarize the following findings for the two research questions:</p>
<list list-type="bullet">
<list-item>
<p>R.Q.1: Based on independent samples <italic>t</italic>-tests, one-way ANOVA, and observations during the experimental procedure, we concluded that participants in the ESVD group perceived a significantly closer psychological distance between themselves and the participants on the opposite side than those in the NOEE group (<italic>t</italic>&#x2009;=&#x2009;&#x2212;2.699; <italic>p</italic>&#x2009;=&#x2009;0.008&#x2009;&#x003C;&#x2009;0.05); additionally, participants rated the psychological distance in Task3 as significantly closer than in Task1 (<italic>p</italic>&#x2009;=&#x2009;0.002&#x2009;&#x003C;&#x2009;0.05), Task2 (<italic>p</italic>&#x2009;&#x003C;&#x2009;0.001), and Task4 (<italic>p</italic>&#x2009;=&#x2009;0.001&#x2009;&#x003C;&#x2009;0.05).</p>
</list-item>
<list-item>
<p>R.Q.2: Based on the repeated-measures ANOVA, we concluded that participants in the ESVD group had higher overall user engagement than those in the NOEE group, with marginal significance (<italic>p</italic>&#x2009;=&#x2009;0.056&#x2009;&#x003C;&#x2009;0.1).</p>
</list-item>
</list>
<p>In future research, we will continue to improve the presentation of the EEG signal visualization. For example, we will combine the visualization with the online exhibition interface to make it easier for users to view, and we will analyze whether the design of the visualization graphics affects the psychological distance between users to some extent. Additionally, we intend to explore whether there are similarities in the brain functional network maps of two participants who are simultaneously experiencing the online museum, and whether the visualization of brainwaves affects the state of a user&#x2019;s brain functional network map to a certain extent. We will also continue to analyze participants&#x2019; raw EEG data recorded during the experience to determine whether it is linked to the EEG visualization and to the user-perceived psychological distance. Regarding users&#x2019; physiological signals, we will explore which other signals (e.g., heartbeat, body temperature, and eye movements) can be visualized, in addition to EEG signals, to help users better perceive psychological distance. Finally, in future experiments, we will increase the diversity of the sample (participants of different ages, nationalities, occupations, etc.), and participants will not be limited to college students.</p>
</sec>
<sec id="sec22" sec-type="data-availability">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="sec23">
<title>Ethics statement</title>
<p>The studies involving human participants were reviewed and approved by the ethics review office of the Faculty of Library, Information and Media Science of the University of Tsukuba, Japan (permission number 22-4). The patients/participants provided their written informed consent to participate in this study. Written informed consent was obtained from the individual(s) for the publication of any potentially identifiable images or data included in this article.</p>
</sec>
<sec id="sec24">
<title>Author contributions</title>
<p>JL designed the study, conducted the experiments, performed the statistical analysis, analyzed and interpreted the data, created the images and tables, drafted the manuscript, and reviewed and revised the manuscript. YY participated in the design of the Moir&#x00E9; Pattern and the discussion of the EEG visualization scheme in the study. ZZ participated in the construction and implementation of the EEG visualization system. NY participated in the implementation of the experiments, assisted in the collection of data, took videos of the process, and reviewed and revised the manuscript. VX provided research guidance, wrote part of the discussion section, and critically reviewed and revised the manuscript. YO provided research guidance, research funding, experimental sites, laboratories, instrumentation, and other material resources related to the experiments. All authors contributed to the article and approved the submitted version.</p>
</sec>
<sec id="sec25" sec-type="funding-information">
<title>Funding</title>
<p>This research was supported by the Japan Science and Technology Agency (JST) SPRING program, Grant Number JPMJSP2124, and by Core Research for Evolutional Science and Technology (CREST), Grant Number JPMJCR1781.</p>
</sec>
<sec id="conf1" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="sec100" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
</body>
<back>
<ref-list>
<title>References</title>
<ref id="ref1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Alarcao</surname> <given-names>S. M.</given-names></name> <name><surname>Fonseca</surname> <given-names>M. J.</given-names></name></person-group> (<year>2017</year>). <article-title>Emotions recognition using EEG signals: a survey</article-title>. <source>IEEE Trans. Affect. Comput.</source> <volume>10</volume>, <fpage>374</fpage>&#x2013;<lpage>393</lpage>. doi: <pub-id pub-id-type="doi">10.1109/TAFFC.2017.2714671</pub-id></citation></ref>
<ref id="ref2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ando</surname> <given-names>S.</given-names></name> <name><surname>Yamaguchi</surname> <given-names>S.</given-names></name> <name><surname>Aoki</surname> <given-names>Y.</given-names></name> <name><surname>Thornicroft</surname> <given-names>G.</given-names></name></person-group> (<year>2013</year>). <article-title>Review of mental-health-related stigma in Japan</article-title>. <source>Psychiatry Clin. Neurosci.</source> <volume>67</volume>, <fpage>471</fpage>&#x2013;<lpage>482</lpage>. doi: <pub-id pub-id-type="doi">10.1111/pcn.12086</pub-id>, PMID: <pub-id pub-id-type="pmid">24118217</pub-id></citation></ref>
<ref id="ref3"><citation citation-type="other"><person-group person-group-type="author"><collab id="coll1">AtoZ Wiki</collab></person-group> (<year>2011</year>). Google art and culture. Available at: <ext-link xlink:href="https://atozwiki.com/Google_Art_Project" ext-link-type="uri">https://atozwiki.com/Google_Art_Project</ext-link> (Accessed May 15, 2022).</citation></ref>
<ref id="ref4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Azor&#x00ED;n</surname> <given-names>C.</given-names></name></person-group> (<year>2020</year>). <article-title>Beyond COVID-19 supernova. Is another education coming?</article-title> <source>J. Profession. Cap. Commun.</source> <volume>5</volume>, <fpage>381</fpage>&#x2013;<lpage>390</lpage>. doi: <pub-id pub-id-type="doi">10.1108/JPCC-05-2020-0019</pub-id></citation></ref>
<ref id="ref5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ball</surname> <given-names>R.</given-names></name></person-group> (<year>2009</year>). <article-title>Social distance in Japan: an exploratory study</article-title>. <source>Michigan Sociol. Rev.</source> <volume>23</volume>, <fpage>105</fpage>&#x2013;<lpage>112</lpage>.</citation></ref>
<ref id="ref6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Blanchard</surname> <given-names>A. L.</given-names></name></person-group> (<year>2021</year>). <article-title>The effects of COVID-19 on virtual working within online groups</article-title>. <source>Group Process. Intergroup Relat.</source> <volume>24</volume>, <fpage>290</fpage>&#x2013;<lpage>296</lpage>. doi: <pub-id pub-id-type="doi">10.1177/1368430220983446</pub-id>, PMID: <pub-id pub-id-type="pmid">35411613</pub-id></citation></ref>
<ref id="ref7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Borghini</surname> <given-names>G.</given-names></name> <name><surname>Astolfi</surname> <given-names>L.</given-names></name> <name><surname>Vecchiato</surname> <given-names>G.</given-names></name> <name><surname>Mattia</surname> <given-names>D.</given-names></name> <name><surname>Babiloni</surname> <given-names>F.</given-names></name></person-group> (<year>2014</year>). <article-title>Measuring neurophysiological signals in aircraft pilots and car drivers for the assessment of mental workload, fatigue and drowsiness</article-title>. <source>Neurosci. Biobehav. Rev.</source> <volume>44</volume>, <fpage>58</fpage>&#x2013;<lpage>75</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neubiorev.2012.10.003</pub-id>, PMID: <pub-id pub-id-type="pmid">23116991</pub-id></citation></ref>
<ref id="ref8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bozkurt</surname> <given-names>A.</given-names></name> <name><surname>Jung</surname> <given-names>I.</given-names></name> <name><surname>Xiao</surname> <given-names>J.</given-names></name> <name><surname>Vladimirschi</surname> <given-names>V.</given-names></name> <name><surname>Schuwer</surname> <given-names>R.</given-names></name> <name><surname>Egorov</surname> <given-names>G.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>A global outlook to the interruption of education due to COVID-19 pandemic: navigating in a time of uncertainty and crisis</article-title>. <source>Asian J. Dist. Educ.</source> <volume>15</volume>, <fpage>1</fpage>&#x2013;<lpage>126</lpage>. doi: <pub-id pub-id-type="doi">10.5281/zenodo.3878572</pub-id></citation></ref>
<ref id="ref9"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Broeck</surname> <given-names>M. V. D.</given-names></name> <name><surname>Kawsar</surname> <given-names>F.</given-names></name> <name><surname>Sch&#x00F6;ning</surname> <given-names>J.</given-names></name></person-group> (<year>2017</year>). &#x201C;It&#x2019;s all around you: exploring 360 video viewing experiences on mobile devices.&#x201D; in <italic>Proceedings of the 25th ACM International Conference on Multimedia</italic>. October, 2017; 762&#x2013;768.</citation></ref>
<ref id="ref10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cecotti</surname> <given-names>H.</given-names></name></person-group> (<year>2011</year>). <article-title>Spelling with non-invasive brain&#x2013;computer interfaces&#x2013;current and future trends</article-title>. <source>J. Physiol.</source> <volume>105</volume>, <fpage>106</fpage>&#x2013;<lpage>114</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jphysparis.2011.08.003</pub-id>, PMID: <pub-id pub-id-type="pmid">21911058</pub-id></citation></ref>
<ref id="ref11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chang</surname> <given-names>K. E.</given-names></name> <name><surname>Chang</surname> <given-names>C. T.</given-names></name> <name><surname>Hou</surname> <given-names>H. T.</given-names></name> <name><surname>Sung</surname> <given-names>Y. T.</given-names></name> <name><surname>Chao</surname> <given-names>H. L.</given-names></name> <name><surname>Lee</surname> <given-names>C. M.</given-names></name></person-group> (<year>2014</year>). <article-title>Development and behavioral pattern analysis of a mobile guide system with augmented reality for painting appreciation instruction in an art museum</article-title>. <source>Comput. Educ.</source> <volume>71</volume>, <fpage>185</fpage>&#x2013;<lpage>197</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.compedu.2013.09.022</pub-id></citation></ref>
<ref id="ref12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>H.</given-names></name> <name><surname>Li</surname> <given-names>S.</given-names></name></person-group> (<year>2018</year>). <article-title>Measuring the psychological distance between an organization and its members&#x2014;The construction and validation of a new scale</article-title>. <source>Front. Psychol.</source> <volume>8</volume>:<fpage>2296</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2017.02296</pub-id>, PMID: <pub-id pub-id-type="pmid">29375427</pub-id></citation></ref>
<ref id="ref13"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Christopher</surname> <given-names>K. R.</given-names></name> <name><surname>Kapur</surname> <given-names>A.</given-names></name> <name><surname>Carnegie</surname> <given-names>D. A.</given-names></name> <name><surname>Grimshaw</surname> <given-names>G. M.</given-names></name></person-group> (<year>2013</year>). &#x201C;Implementing 3D visualizations of EEG signals in artistic applications.&#x201D; in <italic>2013 28th International Conference on Image and Vision Computing New Zealand (IVCNZ 2013),</italic> November, 2013; IEEE, 364&#x2013;369.</citation></ref>
<ref id="ref14"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Christopher</surname> <given-names>K. R.</given-names></name> <name><surname>Kapur</surname> <given-names>A.</given-names></name> <name><surname>Carnegie</surname> <given-names>D. A.</given-names></name> <name><surname>Grimshaw</surname> <given-names>G. M.</given-names></name></person-group> (<year>2014</year>). A History of Emerging Paradigms in EEG for music. In ICMC.</citation></ref>
<ref id="ref15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cincotti</surname> <given-names>F.</given-names></name> <name><surname>Mattia</surname> <given-names>D.</given-names></name> <name><surname>Aloise</surname> <given-names>F.</given-names></name> <name><surname>Bufalari</surname> <given-names>S.</given-names></name> <name><surname>Schalk</surname> <given-names>G.</given-names></name> <name><surname>Oriolo</surname> <given-names>G.</given-names></name> <etal/></person-group>. (<year>2008</year>). <article-title>Non-invasive brain&#x2013;computer interface system: towards its application as assistive technology</article-title>. <source>Brain Res. Bull.</source> <volume>75</volume>, <fpage>796</fpage>&#x2013;<lpage>803</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.brainresbull.2008.01.007</pub-id>, PMID: <pub-id pub-id-type="pmid">18394526</pub-id></citation></ref>
<ref id="ref16"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Dewey</surname> <given-names>J.</given-names></name></person-group> (<year>2018</year>). <source>Logic: The Theory of Inquiry.</source> <publisher-name>N.Y.: Holt, Rinehart and Winston.</publisher-name></citation></ref>
<ref id="ref17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dhar</surname> <given-names>R.</given-names></name> <name><surname>Wertenbroch</surname> <given-names>K.</given-names></name></person-group> (<year>2000</year>). <article-title>Consumer choice between hedonic and utilitarian goods</article-title>. <source>J. Mark. Res.</source> <volume>37</volume>, <fpage>60</fpage>&#x2013;<lpage>71</lpage>. doi: <pub-id pub-id-type="doi">10.1509/jmkr.37.1.60.18718</pub-id>, PMID: <pub-id pub-id-type="pmid">31000222</pub-id></citation></ref>
<ref id="ref18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ekman</surname> <given-names>I.</given-names></name> <name><surname>Chanel</surname> <given-names>G.</given-names></name> <name><surname>J&#x00E4;rvel&#x00E4;</surname> <given-names>S.</given-names></name> <name><surname>Kivikangas</surname> <given-names>J. M.</given-names></name> <name><surname>Salminen</surname> <given-names>M.</given-names></name> <name><surname>Ravaja</surname> <given-names>N.</given-names></name></person-group> (<year>2012</year>). <article-title>Social interaction in games: measuring physiological linkage and social presence</article-title>. <source>Simul. Gaming</source> <volume>43</volume>, <fpage>321</fpage>&#x2013;<lpage>338</lpage>. doi: <pub-id pub-id-type="doi">10.1177/1046878111422121</pub-id></citation></ref>
<ref id="ref19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fong</surname> <given-names>R.</given-names></name> <name><surname>Spickard</surname> <given-names>P. R.</given-names></name></person-group> (<year>1994</year>). <article-title>Ethnic relations in the People&#x2019;s republic of China: images and social distance between Han Chinese and minority and foreign nationalities</article-title>. <source>J. Northeast Asian Stud.</source> <volume>13</volume>, <fpage>26</fpage>&#x2013;<lpage>48</lpage>. doi: <pub-id pub-id-type="doi">10.1007/BF03023331</pub-id></citation></ref>
<ref id="ref20"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Frey</surname> <given-names>J.</given-names></name> <name><surname>M&#x00FC;hl</surname> <given-names>C.</given-names></name> <name><surname>Lotte</surname> <given-names>F.</given-names></name> <name><surname>Hachet</surname> <given-names>M.</given-names></name></person-group> (<year>2013</year>). Review of the use of electroencephalography as an evaluation method for human-computer interaction. arXiv [Preprint]. doi: <pub-id pub-id-type="doi">10.48550/arXiv.1311.2222</pub-id></citation></ref>
<ref id="ref21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fu</surname> <given-names>R.</given-names></name> <name><surname>Li</surname> <given-names>Z.</given-names></name></person-group> (<year>2022</year>). <article-title>An evidence accumulation based block diagonal cluster model for intent recognition from EEG</article-title>. <source>Biomed. Signal Process. Control</source> <volume>77</volume>:<fpage>103835</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.bspc.2022.103835</pub-id></citation></ref>
<ref id="ref22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fu</surname> <given-names>R.</given-names></name> <name><surname>Wang</surname> <given-names>Y.</given-names></name> <name><surname>Jia</surname> <given-names>C.</given-names></name></person-group> (<year>2022</year>). <article-title>A new data augmentation method for EEG features based on the hybrid model of broad-deep networks</article-title>. <source>Expert Syst. Appl.</source> <volume>202</volume>:<fpage>117386</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.eswa.2022.117386</pub-id></citation></ref>
<ref id="ref23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gao</surname> <given-names>Z.</given-names></name> <name><surname>Wang</surname> <given-names>X.</given-names></name> <name><surname>Yang</surname> <given-names>Y.</given-names></name> <name><surname>Mu</surname> <given-names>C.</given-names></name> <name><surname>Cai</surname> <given-names>Q.</given-names></name> <name><surname>Dang</surname> <given-names>W.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>EEG-based spatio&#x2013;temporal convolutional neural network for driver fatigue evaluation</article-title>. <source>IEEE Trans, Neural Net. Learn. Syst.</source> <volume>30</volume>, <fpage>2755</fpage>&#x2013;<lpage>2763</lpage>. doi: <pub-id pub-id-type="doi">10.1109/TNNLS.2018.2886414</pub-id>, PMID: <pub-id pub-id-type="pmid">30640634</pub-id></citation></ref>
<ref id="ref24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gernot</surname> <given-names>G.</given-names></name> <name><surname>Pelowski</surname> <given-names>M.</given-names></name> <name><surname>Leder</surname> <given-names>H.</given-names></name></person-group> (<year>2018</year>). <article-title>Empathy, Einf&#x00FC;hlung, and aesthetic experience: the effect of emotion contagion on appreciation of representational and abstract art using fEMG and SCR</article-title>. <source>Cogn. Process.</source> <volume>19</volume>, <fpage>147</fpage>&#x2013;<lpage>165</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s10339-017-0800-2</pub-id>, PMID: <pub-id pub-id-type="pmid">28314942</pub-id></citation></ref>
<ref id="ref25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Goodwin</surname> <given-names>R.</given-names></name> <name><surname>Hou</surname> <given-names>W. K.</given-names></name> <name><surname>Sun</surname> <given-names>S.</given-names></name> <name><surname>Ben-Ezra</surname> <given-names>M.</given-names></name></person-group> (<year>2021</year>). <article-title>Psychological and behavioural responses to COVID-19: a China&#x2013;Britain comparison</article-title>. <source>J. Epidemiol. Community Health</source> <volume>75</volume>, <fpage>189</fpage>&#x2013;<lpage>192</lpage>. doi: <pub-id pub-id-type="doi">10.1136/jech-2020-214453</pub-id>, PMID: <pub-id pub-id-type="pmid">32967892</pub-id></citation></ref>
<ref id="ref26"><citation citation-type="other"><person-group person-group-type="author"><collab id="coll2">Google Arts and Culture</collab></person-group> (<year>2018</year>). Bringing the world&#x2019;s art and culture online for everyone. Available at: <ext-link xlink:href="https://about.artsandculture.google.com/" ext-link-type="uri">https://about.artsandculture.google.com/</ext-link> (Accessed April 15, 2022).</citation></ref>
<ref id="ref27"><citation citation-type="other"><person-group person-group-type="author"><collab id="coll3">Google Arts and Culture</collab></person-group> (<year>2020</year>). Puzzle Party. Available at: <ext-link xlink:href="https://artsandculture.google.com/experiment/puzzle-party/EwGBPZlIzv0KRw?cp=1" ext-link-type="uri">https://artsandculture.google.com/experiment/puzzle-party/EwGBPZlIzv0KRw?cp=1</ext-link> (Accessed May 15, 2022).</citation></ref>
<ref id="ref28"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Hall</surname> <given-names>E. T.</given-names></name></person-group> (<year>1959</year>). <source>The Silent Language.</source> <publisher-name>New York: Fawcett</publisher-name>.</citation></ref>
<ref id="ref29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Haraguchi</surname> <given-names>K.</given-names></name> <name><surname>Maeda</surname> <given-names>M.</given-names></name> <name><surname>Mei</surname> <given-names>Y. X.</given-names></name> <name><surname>Uchimura</surname> <given-names>N.</given-names></name></person-group> (<year>2009</year>). <article-title>Stigma associated with schizophrenia: cultural comparison of social distance in Japan and China</article-title>. <source>Psychiatry Clin. Neurosci.</source> <volume>63</volume>, <fpage>153</fpage>&#x2013;<lpage>160</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1440-1819.2009.01922.x</pub-id>, PMID: <pub-id pub-id-type="pmid">19335384</pub-id></citation></ref>
<ref id="ref30"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Hensley</surname> <given-names>L.</given-names></name></person-group> (<year>2020</year>). Social distancing is out, physical distancing is in&#x2014;here&#x2019;s how to do it. Global News&#x2013;Canada (March 27, 2020).</citation></ref>
<ref id="ref31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hodder</surname> <given-names>A.</given-names></name></person-group> (<year>2020</year>). <article-title>New technology, work and employment in the era of COVID-19: reflecting on legacies of research</article-title>. <source>N. Technol. Work. Employ.</source> <volume>35</volume>, <fpage>262</fpage>&#x2013;<lpage>275</lpage>. doi: <pub-id pub-id-type="doi">10.1111/ntwe.12173</pub-id>, PMID: <pub-id pub-id-type="pmid">32836624</pub-id></citation></ref>
<ref id="ref32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hoffman</surname> <given-names>S. K.</given-names></name></person-group> (<year>2020</year>). <article-title>Online exhibitions during the COVID-19 pandemic</article-title>. <source>Museum Worlds</source> <volume>8</volume>, <fpage>210</fpage>&#x2013;<lpage>215</lpage>. doi: <pub-id pub-id-type="doi">10.3167/armw.2020.080115</pub-id>, PMID: <pub-id pub-id-type="pmid">35846638</pub-id></citation></ref>
<ref id="ref33"><citation citation-type="other"><person-group person-group-type="author"><collab id="coll4">International Council of Museums</collab></person-group> (<year>2021</year>). ICOM report: museums, museum professionals and Covid-19: third survey. ICOM, 15&#x2013;16. Available at: <ext-link xlink:href="https://icom.museum/wp-content/uploads/2021/07/Museums-and-Covid-19_third-ICOM-report.pdf" ext-link-type="uri">https://icom.museum/wp-content/uploads/2021/07/Museums-and-Covid-19_third-ICOM-report.pdf</ext-link></citation></ref>
<ref id="ref34"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Isaac</surname> <given-names>A.</given-names></name></person-group> (<year>2000</year>). <source>The Theory of the Moir&#x00E9; Phenomenon</source>. <publisher-loc>Dordrecht</publisher-loc>: <publisher-name>Kluwer Academic press</publisher-name>, <fpage>1</fpage>&#x2013;<lpage>8</lpage>.</citation></ref>
<ref id="ref35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jap</surname> <given-names>B. T.</given-names></name> <name><surname>Lal</surname> <given-names>S.</given-names></name> <name><surname>Fischer</surname> <given-names>P.</given-names></name> <name><surname>Bekiaris</surname> <given-names>E.</given-names></name></person-group> (<year>2009</year>). <article-title>Using EEG spectral components to assess algorithms for detecting fatigue</article-title>. <source>Expert Syst. Appl.</source> <volume>36</volume>, <fpage>2352</fpage>&#x2013;<lpage>2359</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.eswa.2007.12.043</pub-id></citation></ref>
<ref id="ref36"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Kennicott</surname> <given-names>P.</given-names></name></person-group> (<year>2011</year>). &#x2018;National treasures: Google art project unlocks riches of world&#x2019;s galleries&#x2019;. The Washington Post. 1 February. Available at: <ext-link xlink:href="https://www.washingtonpost.com/entertainment/national-treasures-google-art-project-unlocks-riches-of-worlds-galleries/2011/02/01/ABJVe0Q_story.html" ext-link-type="uri">https://www.washingtonpost.com/entertainment/national-treasures-google-art-project-unlocks-riches-of-worlds-galleries/2011/02/01/ABJVe0Q_story.html</ext-link></citation></ref>
<ref id="ref37"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kerous</surname> <given-names>B.</given-names></name> <name><surname>Skola</surname> <given-names>F.</given-names></name> <name><surname>Liarokapis</surname> <given-names>F.</given-names></name></person-group> (<year>2018</year>). <article-title>EEG-based BCI and video games: a progress report</article-title>. <source>Virtual Reality</source> <volume>22</volume>, <fpage>119</fpage>&#x2013;<lpage>135</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s10055-017-0328-x</pub-id></citation></ref>
<ref id="ref38"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>King</surname> <given-names>E.</given-names></name> <name><surname>Smith</surname> <given-names>M. P.</given-names></name> <name><surname>Wilson</surname> <given-names>P. F.</given-names></name> <name><surname>Williams</surname> <given-names>M. A.</given-names></name></person-group> (<year>2021</year>). <article-title>Digital responses of UK Museum exhibitions to the COVID-19 crisis, march&#x2013;June 2020</article-title>. <source>Curator</source> <volume>64</volume>, <fpage>487</fpage>&#x2013;<lpage>504</lpage>. doi: <pub-id pub-id-type="doi">10.1111/cura.12413</pub-id>, PMID: <pub-id pub-id-type="pmid">34230675</pub-id></citation></ref>
<ref id="ref39"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kundr&#x00E1;t</surname> <given-names>J.</given-names></name> <name><surname>Rojkov&#x00E1;</surname> <given-names>Z.</given-names></name></person-group> (<year>2021</year>). <article-title>Psychological distance as a means of evaluation</article-title>. <source>New Ideas Psychol.</source> <volume>63</volume>:<fpage>100900</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.newideapsych.2021.100900</pub-id>, PMID: <pub-id pub-id-type="pmid">35171917</pub-id></citation></ref>
<ref id="ref40"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>LaRocco</surname> <given-names>J.</given-names></name> <name><surname>Le</surname> <given-names>M. D.</given-names></name> <name><surname>Paeng</surname> <given-names>D. G.</given-names></name></person-group> (<year>2020</year>). <article-title>A systemic review of available low-cost EEG headsets used for drowsiness detection</article-title>. <source>Front. Neuroinform.</source> <volume>14</volume>:<fpage>553352</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fninf.2020.553352</pub-id>, PMID: <pub-id pub-id-type="pmid">33178004</pub-id></citation></ref>
<ref id="ref41"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Lee</surname> <given-names>S.</given-names></name></person-group> (<year>2004</year>). Kansei design approach based on tangible interaction explicit images of 3D objects and evaluation of various behaviors related with creation/imagination by analysis of brain waves.</citation></ref>
<ref id="ref42"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Lee</surname> <given-names>S.</given-names></name> <name><surname>Harada</surname> <given-names>A.</given-names></name> <name><surname>Stappers</surname> <given-names>P. J.</given-names></name></person-group> (<year>2002</year>). <source>Pleasure With Products: Design Based on Kansei</source>. <publisher-loc>London, New York</publisher-loc>: <publisher-name>Taylor &#x0026; Francis,</publisher-name> <fpage>219</fpage>&#x2013;<lpage>229</lpage>.</citation></ref>
<ref id="ref43"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>J.</given-names></name> <name><surname>Sun</surname> <given-names>C.</given-names></name> <name><surname>Xanat</surname> <given-names>V. M.</given-names></name> <name><surname>Ochiai</surname> <given-names>Y.</given-names></name></person-group> (<year>2022a</year>). &#x201C;Electroencephalography and self-assessment evaluation of engagement with online exhibitions: case study of google arts and culture.&#x201D; in <italic>International Conference on Human-Computer Interaction</italic>. June, 2022; Springer, Cham; 316&#x2013;331.</citation></ref>
<ref id="ref44"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>S.</given-names></name> <name><surname>Xu</surname> <given-names>Q.</given-names></name></person-group> (<year>2022</year>). <article-title>Family support as a protective factor for attitudes toward social distancing and in preserving positive mental health during the COVID-19 pandemic</article-title>. <source>J. Health Psychol.</source> <volume>27</volume>, <fpage>858</fpage>&#x2013;<lpage>867</lpage>. doi: <pub-id pub-id-type="doi">10.1177/1359105320971697</pub-id>, PMID: <pub-id pub-id-type="pmid">33153342</pub-id></citation></ref>
<ref id="ref45"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>J.</given-names></name> <name><surname>Yang</surname> <given-names>Y.</given-names></name> <name><surname>Zhang</surname> <given-names>Z.</given-names></name> <name><surname>Zhao</surname> <given-names>Y.</given-names></name> <name><surname>Xanat</surname> <given-names>V. M.</given-names></name> <name><surname>Ochiai</surname> <given-names>Y.</given-names></name></person-group> (<year>2022b</year>). &#x201C;Visualizing the electroencephalography signal discrepancy when maintaining social distancing: EEG-based interactive moir&#x00E9; patterns.&#x201D; in <italic>International Conference on Human-Computer Interaction</italic>. June 26&#x2013;July 1, 2022; Springer, Cham; 185&#x2013;197</citation></ref>
<ref id="ref46"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liberman</surname> <given-names>N.</given-names></name> <name><surname>Trope</surname> <given-names>Y.</given-names></name></person-group> (<year>2014</year>). <article-title>Traversing psychological distance</article-title>. <source>Trends Cogn. Sci.</source> <volume>18</volume>, <fpage>364</fpage>&#x2013;<lpage>369</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tics.2014.03.001</pub-id>, PMID: <pub-id pub-id-type="pmid">24726527</pub-id></citation></ref>
<ref id="ref47"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>Y.</given-names></name> <name><surname>Sourina</surname> <given-names>O.</given-names></name> <name><surname>Nguyen</surname> <given-names>M. K.</given-names></name></person-group> (<year>2010</year>). &#x201C;Real-time EEG-based human emotion recognition and visualization.&#x201D; in <italic>2010 International Conference on Cyberworlds</italic>. October, 2010; IEEE, 262&#x2013;269.</citation></ref>
<ref id="ref48"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ma</surname> <given-names>Q.</given-names></name> <name><surname>Pei</surname> <given-names>G.</given-names></name> <name><surname>Jin</surname> <given-names>J.</given-names></name></person-group> (<year>2015</year>). <article-title>What makes you generous? The influence of rural and urban rearing on social discounting in China</article-title>. <source>PLoS One</source> <volume>10</volume>:<fpage>e0133078</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0133078</pub-id>, PMID: <pub-id pub-id-type="pmid">26172452</pub-id></citation></ref>
<ref id="ref49"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Maskeliunas</surname> <given-names>R.</given-names></name> <name><surname>Damasevicius</surname> <given-names>R.</given-names></name> <name><surname>Martisius</surname> <given-names>I.</given-names></name> <name><surname>Vasiljevas</surname> <given-names>M.</given-names></name></person-group> (<year>2016</year>). <article-title>Consumer-grade EEG devices: are they usable for control tasks?</article-title> <source>PeerJ</source> <volume>4</volume>:<fpage>e1746</fpage>. doi: <pub-id pub-id-type="doi">10.7717/peerj.1746</pub-id>, PMID: <pub-id pub-id-type="pmid">27014511</pub-id></citation></ref>
<ref id="ref50"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Motataianu</surname> <given-names>I. R.</given-names></name></person-group> (<year>2015</year>). <article-title>Parent-child connection&#x2013;emotional synchronization and playing; a possible model to combat the child's unsafe attachment</article-title>. <source>Procedia Soc. Behav. Sci.</source> <volume>180</volume>, <fpage>1178</fpage>&#x2013;<lpage>1183</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.sbspro.2015.02.238</pub-id>, PMID: <pub-id pub-id-type="pmid">12058846</pub-id></citation></ref>
<ref id="ref330"><citation citation-type="other"><person-group person-group-type="author"><collab id="coll20">Network of European Museum Organisations</collab></person-group> (<year>2020</year>).  Survey on the impact of the COVID-19 situation on museums in Europe. Available at: <ext-link xlink:href="https://www.ne-mo.org/fileadmin/Dateien/public/NEMO_documents/NEMO_COVID19_Report_12.05.2020.pdf" ext-link-type="uri">https://www.ne-mo.org/fileadmin/Dateien/public/NEMO_documents/NEMO_COVID19_Report_12.05.2020.pdf</ext-link></citation></ref>
<ref id="ref51"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Niedermeyer</surname> <given-names>E.</given-names></name> <name><surname>da Silva</surname> <given-names>F. L.</given-names></name></person-group> (<year>2005</year>). <source>Electroencephalography: Basic Principles, Clinical Applications, and Related Fields.</source> <publisher-name>Lippincott Williams and Wilkins</publisher-name>.</citation></ref>
<ref id="ref52"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>O&#x2019;Brien</surname> <given-names>H. L.</given-names></name> <name><surname>Cairns</surname> <given-names>P.</given-names></name> <name><surname>Hall</surname> <given-names>M.</given-names></name></person-group> (<year>2018</year>). <article-title>A practical approach to measuring user engagement with the refined user engagement scale (UES) and new UES short form</article-title>. <source>Int. J. Human Comput. Stud.</source> <volume>112</volume>, <fpage>28</fpage>&#x2013;<lpage>39</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.ijhcs.2018.01.004</pub-id></citation></ref>
<ref id="ref53"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>O&#x2019;brien</surname> <given-names>H. L.</given-names></name> <name><surname>Toms</surname> <given-names>E. G.</given-names></name></person-group> (<year>2013</year>). <article-title>Examining the generalizability of the user engagement scale (UES) in exploratory search</article-title>. <source>Inf. Process. Manag.</source> <volume>49</volume>, <fpage>1092</fpage>&#x2013;<lpage>1107</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.ipm.2012.08.005</pub-id></citation></ref>
<ref id="ref54"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Osaki</surname> <given-names>Y.</given-names></name> <name><surname>Otsuki</surname> <given-names>H.</given-names></name> <name><surname>Imamoto</surname> <given-names>A.</given-names></name> <name><surname>Kinjo</surname> <given-names>A.</given-names></name> <name><surname>Fujii</surname> <given-names>M.</given-names></name> <name><surname>Kuwabara</surname> <given-names>Y.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Suicide rates during social crises: changes in the suicide rate in Japan after the great East Japan earthquake and during the COVID-19 pandemic</article-title>. <source>J. Psychiatr. Res.</source> <volume>140</volume>, <fpage>39</fpage>&#x2013;<lpage>44</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jpsychires.2021.05.035</pub-id>, PMID: <pub-id pub-id-type="pmid">34090102</pub-id></citation></ref>
<ref id="ref55"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Perra</surname> <given-names>N.</given-names></name></person-group> (<year>2021</year>). <article-title>Non-pharmaceutical interventions during the COVID-19 pandemic: a review</article-title>. <source>Phys. Rep.</source> <volume>913</volume>, <fpage>1</fpage>&#x2013;<lpage>52</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.physrep.2021.02.001</pub-id>, PMID: <pub-id pub-id-type="pmid">33612922</pub-id></citation></ref>
<ref id="ref56"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Petridis</surname> <given-names>P.</given-names></name> <name><surname>White</surname> <given-names>M.</given-names></name> <name><surname>Mourkousis</surname> <given-names>N.</given-names></name> <name><surname>Liarokapis</surname> <given-names>F.</given-names></name> <name><surname>Sifniotis</surname> <given-names>M.</given-names></name> <name><surname>Basu</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2005</year>). &#x201C;Exploring and interacting with virtual museums.&#x201D; in <italic>Proceedings of Computer Applications and Quantitative Methods in Archaeology (CAA)</italic>. March, 2005.</citation></ref>
<ref id="ref57"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pokhrel</surname> <given-names>S.</given-names></name> <name><surname>Chhetri</surname> <given-names>R.</given-names></name></person-group> (<year>2021</year>). <article-title>A literature review on impact of COVID-19 pandemic on teaching and learning</article-title>. <source>Higher Educ. Future</source> <volume>8</volume>, <fpage>133</fpage>&#x2013;<lpage>141</lpage>. doi: <pub-id pub-id-type="doi">10.1177/2347631120983481</pub-id>, PMID: <pub-id pub-id-type="pmid">35732562</pub-id></citation></ref>
<ref id="ref58"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Radziwill</surname> <given-names>N. M.</given-names></name> <name><surname>Benton</surname> <given-names>M. C.</given-names></name></person-group> (<year>2017</year>). Evaluating quality of chatbots and intelligent conversational agents. arXiv [Preprint]. doi: <pub-id pub-id-type="doi">10.48550/arXiv.1704.04579</pub-id></citation></ref>
<ref id="ref59"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Robinson</surname> <given-names>R. B.</given-names></name> <name><surname>Reid</surname> <given-names>E.</given-names></name> <name><surname>Fey</surname> <given-names>J. C.</given-names></name> <name><surname>Depping</surname> <given-names>A. E.</given-names></name> <name><surname>Isbister</surname> <given-names>K.</given-names></name> <name><surname>Mandryk</surname> <given-names>R. L.</given-names></name></person-group> (<year>2020</year>). &#x201C;Designing and evaluating in the same boat', A game of embodied synchronization for enhancing social play.&#x201D; in <italic>Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems</italic>. April, 2020; 1&#x2013;14.</citation></ref>
<ref id="ref60"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sadhukhan</surname> <given-names>S.</given-names></name> <name><surname>Chattopadhyay</surname> <given-names>R.</given-names></name> <name><surname>Chakraborty</surname> <given-names>S.</given-names></name></person-group> (<year>2021</year>). <article-title>Cooperators overcome migration dilemma through synchronization</article-title>. <source>Phys. Rev. Res.</source> <volume>3</volume>:<fpage>013009</fpage>. doi: <pub-id pub-id-type="doi">10.1103/PhysRevResearch.3.013009</pub-id></citation></ref>
<ref id="ref61"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Scerrati</surname> <given-names>E.</given-names></name> <name><surname>D&#x2019;Ascenzo</surname> <given-names>S.</given-names></name> <name><surname>Nicoletti</surname> <given-names>R.</given-names></name> <name><surname>Lugli</surname> <given-names>L.</given-names></name></person-group> (<year>2022</year>). <article-title>Assessing interpersonal proximity evaluation in the Covid-19 era: evidence from the affective priming task</article-title>. <source>Front. Psychol.</source> <volume>13</volume>:<fpage>901730</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2022.901730</pub-id>, PMID: <pub-id pub-id-type="pmid">35783734</pub-id></citation></ref>
<ref id="ref62"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shaw</surname> <given-names>R.</given-names></name> <name><surname>Kim</surname> <given-names>Y. K.</given-names></name> <name><surname>Hua</surname> <given-names>J.</given-names></name></person-group> (<year>2020</year>). <article-title>Governance, technology and citizen behavior in pandemic: lessons from COVID-19 in East Asia</article-title>. <source>Progress Disaster Sci.</source> <volume>6</volume>:<fpage>100090</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.pdisas.2020.100090</pub-id>, PMID: <pub-id pub-id-type="pmid">34171010</pub-id></citation></ref>
<ref id="ref63"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shenghua</surname> <given-names>J.</given-names></name></person-group> (<year>1997</year>). <article-title>Interpersonal space and interpersonal interaction</article-title>. <source>Sociol. Res.</source> <volume>1</volume>, <fpage>118</fpage>&#x2013;<lpage>119</lpage>.</citation></ref>
<ref id="ref64"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shu</surname> <given-names>L.</given-names></name> <name><surname>Xie</surname> <given-names>J.</given-names></name> <name><surname>Yang</surname> <given-names>M.</given-names></name> <name><surname>Li</surname> <given-names>Z.</given-names></name> <name><surname>Li</surname> <given-names>Z.</given-names></name> <name><surname>Liao</surname> <given-names>D.</given-names></name> <etal/></person-group>. (<year>2018</year>). <article-title>A review of emotion recognition using physiological signals</article-title>. <source>Sensors</source> <volume>18</volume>:<fpage>2074</fpage>. doi: <pub-id pub-id-type="doi">10.3390/s18072074</pub-id>, PMID: <pub-id pub-id-type="pmid">29958457</pub-id></citation></ref>
<ref id="ref65"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Song</surname> <given-names>F.</given-names></name> <name><surname>Cadsby</surname> <given-names>C. B.</given-names></name> <name><surname>Bi</surname> <given-names>Y.</given-names></name></person-group> (<year>2012</year>). <article-title>Trust, reciprocity, and guanxi in China: an experimental investigation</article-title>. <source>Manag. Organ. Rev.</source> <volume>8</volume>, <fpage>397</fpage>&#x2013;<lpage>421</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1740-8784.2011.00272.x</pub-id></citation></ref>
<ref id="ref66"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Spillmann</surname> <given-names>L.</given-names></name></person-group> (<year>1993</year>). <article-title>The perception of movement and depth in Moir&#x00E9; patterns</article-title>. <source>Perception</source> <volume>22</volume>, <fpage>287</fpage>&#x2013;<lpage>308</lpage>. doi: <pub-id pub-id-type="doi">10.1068/p220287</pub-id>, PMID: <pub-id pub-id-type="pmid">8316516</pub-id></citation></ref>
<ref id="ref67"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Styliani</surname> <given-names>S.</given-names></name> <name><surname>Fotis</surname> <given-names>L.</given-names></name> <name><surname>Kostas</surname> <given-names>K.</given-names></name> <name><surname>Petros</surname> <given-names>P.</given-names></name></person-group> (<year>2009</year>). <article-title>Virtual museums, a survey and some issues for consideration</article-title>. <source>J. Cult. Herit.</source> <volume>10</volume>, <fpage>520</fpage>&#x2013;<lpage>528</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.culher.2009.03.003</pub-id></citation></ref>
<ref id="ref68"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sugaya</surname> <given-names>N.</given-names></name> <name><surname>Yamamoto</surname> <given-names>T.</given-names></name> <name><surname>Suzuki</surname> <given-names>N.</given-names></name> <name><surname>Uchiumi</surname> <given-names>C.</given-names></name></person-group> (<year>2020</year>). <article-title>A real-time survey on the psychological impact of mild lockdown for COVID-19 in the Japanese population</article-title>. <source>Scientific data</source> <volume>7</volume>, <fpage>1</fpage>&#x2013;<lpage>6</lpage>. doi: <pub-id pub-id-type="doi">10.1038/s41597-020-00714-9</pub-id></citation></ref>
<ref id="ref69"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Takagi</surname> <given-names>D.</given-names></name> <name><surname>Kondo</surname> <given-names>K.</given-names></name> <name><surname>Kondo</surname> <given-names>N.</given-names></name> <name><surname>Cable</surname> <given-names>N.</given-names></name> <name><surname>Ikeda</surname> <given-names>K. I.</given-names></name> <name><surname>Kawachi</surname> <given-names>I.</given-names></name></person-group> (<year>2013</year>). <article-title>Social disorganization/social fragmentation and risk of depression among older people in Japan: multilevel investigation of indices of social distance</article-title>. <source>Soc. Sci. Med.</source> <volume>83</volume>, <fpage>81</fpage>&#x2013;<lpage>89</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.socscimed.2013.01.001</pub-id>, PMID: <pub-id pub-id-type="pmid">23333256</pub-id></citation></ref>
<ref id="ref70"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Trope</surname> <given-names>Y.</given-names></name> <name><surname>Liberman</surname> <given-names>N.</given-names></name></person-group> (<year>2010</year>). <article-title>Construal-level theory of psychological distance</article-title>. <source>Psychol. Rev.</source> <volume>117</volume>, <fpage>440</fpage>&#x2013;<lpage>463</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0018963</pub-id>, PMID: <pub-id pub-id-type="pmid">20438233</pub-id></citation></ref>
<ref id="ref71"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Udell</surname> <given-names>M. K.</given-names></name></person-group> (<year>2019</year>). The museum of the infinite scroll: assessing the effectiveness of google arts and culture as a virtual tool for museum accessibility.</citation></ref>
<ref id="ref72"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Van Boven</surname> <given-names>L.</given-names></name> <name><surname>Kane</surname> <given-names>J.</given-names></name> <name><surname>McGraw</surname> <given-names>A. P.</given-names></name> <name><surname>Dale</surname> <given-names>J.</given-names></name></person-group> (<year>2010</year>). <article-title>Feeling close: emotional intensity reduces perceived psychological distance</article-title>. <source>J. Pers. Soc. Psychol.</source> <volume>98</volume>, <fpage>872</fpage>&#x2013;<lpage>885</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0019262</pub-id>, PMID: <pub-id pub-id-type="pmid">20515244</pub-id></citation></ref>
<ref id="ref73"><citation citation-type="other"><person-group person-group-type="author"><name><surname>Venske</surname> <given-names>R.</given-names></name></person-group> (<year>2020</year>). &#x201C;Die Wirkung von Sprache in Krisenzeiten&#x201D;[The effect of language in times of crisis](interview). NDR Kultur (in German). Norddeutscher Rundfunk. (NB. Regula Venske is president of the PEN Centre Germany.) ed. A. Schwyzer.</citation></ref>
<ref id="ref74"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Weinberg</surname> <given-names>H.</given-names></name></person-group> (<year>2020</year>). <article-title>Online group psychotherapy: challenges and possibilities during COVID-19&#x2014;A practice review</article-title>. <source>Group Dyn. Theory Res. Pract.</source> <volume>24</volume>, <fpage>201</fpage>&#x2013;<lpage>211</lpage>. doi: <pub-id pub-id-type="doi">10.1037/gdn0000140</pub-id></citation></ref>
<ref id="ref75"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wu</surname> <given-names>X.</given-names></name> <name><surname>Li</surname> <given-names>Y.</given-names></name></person-group> (<year>2022</year>). <article-title>Experience mode of digital media art under virtual reality technology</article-title>. <source>Appl. Bionics Biomechan.</source> <volume>2022</volume>, <fpage>1</fpage>&#x2013;<lpage>6</lpage>. doi: <pub-id pub-id-type="doi">10.1155/2022/5117150</pub-id></citation></ref>
</ref-list>
</back>
</article>